target-ppc: Add ISA2.06 divdeu[o] Instructions
[qemu-kvm.git] / target-ppc / translate.c
1 /*
2 * PowerPC emulation for qemu: main translation routines.
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #include "cpu.h"
22 #include "disas/disas.h"
23 #include "tcg-op.h"
24 #include "qemu/host-utils.h"
26 #include "helper.h"
27 #define GEN_HELPER 1
28 #include "helper.h"
30 #define CPU_SINGLE_STEP 0x1
31 #define CPU_BRANCH_STEP 0x2
32 #define GDBSTUB_SINGLE_STEP 0x4
34 /* Include definitions for instruction classes and implementation flags */
35 //#define PPC_DEBUG_DISAS
36 //#define DO_PPC_STATISTICS
38 #ifdef PPC_DEBUG_DISAS
39 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
40 #else
41 # define LOG_DISAS(...) do { } while (0)
42 #endif
43 /*****************************************************************************/
44 /* Code translation helpers */
46 /* global register indexes */
47 static TCGv_ptr cpu_env;
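/* Backing storage for the NUL-terminated names of the TCG globals created
 * in ppc_translate_init(): "r0".."r9" take 3 bytes each, "r10".."r31" take
 * 4, and so on for each register class, which is what the size expression
 * below adds up. */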
48 static char cpu_reg_names[10*3 + 22*4 /* GPR */
49 #if !defined(TARGET_PPC64)
50 + 10*4 + 22*5 /* SPE GPRh */
51 #endif
52 + 10*4 + 22*5 /* FPR */
53 + 2*(10*6 + 22*7) /* AVRh, AVRl */
54 + 10*5 + 22*6 /* VSR */
55 + 8*5 /* CRF */];
56 static TCGv cpu_gpr[32];
57 #if !defined(TARGET_PPC64)
58 static TCGv cpu_gprh[32];
59 #endif
60 static TCGv_i64 cpu_fpr[32];
61 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
62 static TCGv_i64 cpu_vsr[32];
63 static TCGv_i32 cpu_crf[8];
64 static TCGv cpu_nip;
65 static TCGv cpu_msr;
66 static TCGv cpu_ctr;
67 static TCGv cpu_lr;
68 #if defined(TARGET_PPC64)
69 static TCGv cpu_cfar;
70 #endif
71 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca;
72 static TCGv cpu_reserve;
73 static TCGv cpu_fpscr;
74 static TCGv_i32 cpu_access_type;
76 #include "exec/gen-icount.h"
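/* Register the PPC architectural state (GPRs, FPRs, AVR halves, VSRs, CR
 * fields, NIP, MSR, XER bits, ...) as named TCG globals so the code
 * generator can track them.  The done_init flag makes every call after the
 * first a no-op. */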
78 void ppc_translate_init(void)
80 int i;
81 char* p;
82 size_t cpu_reg_names_size;
83 static int done_init = 0;
85 if (done_init)
86 return;
88 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
90 p = cpu_reg_names;
91 cpu_reg_names_size = sizeof(cpu_reg_names);
93 for (i = 0; i < 8; i++) {
94 snprintf(p, cpu_reg_names_size, "crf%d", i);
95 cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
96 offsetof(CPUPPCState, crf[i]), p);
97 p += 5;
98 cpu_reg_names_size -= 5;
101 for (i = 0; i < 32; i++) {
102 snprintf(p, cpu_reg_names_size, "r%d", i);
103 cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
104 offsetof(CPUPPCState, gpr[i]), p);
105 p += (i < 10) ? 3 : 4;
106 cpu_reg_names_size -= (i < 10) ? 3 : 4;
107 #if !defined(TARGET_PPC64)
108 snprintf(p, cpu_reg_names_size, "r%dH", i);
109 cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
110 offsetof(CPUPPCState, gprh[i]), p);
111 p += (i < 10) ? 4 : 5;
112 cpu_reg_names_size -= (i < 10) ? 4 : 5;
113 #endif
115 snprintf(p, cpu_reg_names_size, "fp%d", i);
116 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
117 offsetof(CPUPPCState, fpr[i]), p);
118 p += (i < 10) ? 4 : 5;
119 cpu_reg_names_size -= (i < 10) ? 4 : 5;
121 snprintf(p, cpu_reg_names_size, "avr%dH", i);
122 #ifdef HOST_WORDS_BIGENDIAN
123 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
124 offsetof(CPUPPCState, avr[i].u64[0]), p);
125 #else
126 cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
127 offsetof(CPUPPCState, avr[i].u64[1]), p);
128 #endif
129 p += (i < 10) ? 6 : 7;
130 cpu_reg_names_size -= (i < 10) ? 6 : 7;
132 snprintf(p, cpu_reg_names_size, "avr%dL", i);
133 #ifdef HOST_WORDS_BIGENDIAN
134 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
135 offsetof(CPUPPCState, avr[i].u64[1]), p);
136 #else
137 cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
138 offsetof(CPUPPCState, avr[i].u64[0]), p);
139 #endif
140 p += (i < 10) ? 6 : 7;
141 cpu_reg_names_size -= (i < 10) ? 6 : 7;
142 snprintf(p, cpu_reg_names_size, "vsr%d", i);
143 cpu_vsr[i] = tcg_global_mem_new_i64(TCG_AREG0,
144 offsetof(CPUPPCState, vsr[i]), p);
145 p += (i < 10) ? 5 : 6;
146 cpu_reg_names_size -= (i < 10) ? 5 : 6;
149 cpu_nip = tcg_global_mem_new(TCG_AREG0,
150 offsetof(CPUPPCState, nip), "nip");
152 cpu_msr = tcg_global_mem_new(TCG_AREG0,
153 offsetof(CPUPPCState, msr), "msr");
155 cpu_ctr = tcg_global_mem_new(TCG_AREG0,
156 offsetof(CPUPPCState, ctr), "ctr");
158 cpu_lr = tcg_global_mem_new(TCG_AREG0,
159 offsetof(CPUPPCState, lr), "lr");
161 #if defined(TARGET_PPC64)
162 cpu_cfar = tcg_global_mem_new(TCG_AREG0,
163 offsetof(CPUPPCState, cfar), "cfar");
164 #endif
166 cpu_xer = tcg_global_mem_new(TCG_AREG0,
167 offsetof(CPUPPCState, xer), "xer");
168 cpu_so = tcg_global_mem_new(TCG_AREG0,
169 offsetof(CPUPPCState, so), "SO");
170 cpu_ov = tcg_global_mem_new(TCG_AREG0,
171 offsetof(CPUPPCState, ov), "OV");
172 cpu_ca = tcg_global_mem_new(TCG_AREG0,
173 offsetof(CPUPPCState, ca), "CA");
175 cpu_reserve = tcg_global_mem_new(TCG_AREG0,
176 offsetof(CPUPPCState, reserve_addr),
177 "reserve_addr");
179 cpu_fpscr = tcg_global_mem_new(TCG_AREG0,
180 offsetof(CPUPPCState, fpscr), "fpscr");
182 cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
183 offsetof(CPUPPCState, access_type), "access_type");
185 done_init = 1;
188 /* internal defines */
189 typedef struct DisasContext {
190 struct TranslationBlock *tb;
191 target_ulong nip;
192 uint32_t opcode;
193 uint32_t exception;
194 /* Routine used to access memory */
195 int mem_idx;
196 int access_type;
197 /* Translation flags */
198 int le_mode;
199 #if defined(TARGET_PPC64)
200 int sf_mode;
201 int has_cfar;
202 #endif
203 int fpu_enabled;
204 int altivec_enabled;
205 int vsx_enabled;
206 int spe_enabled;
207 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
208 int singlestep_enabled;
209 uint64_t insns_flags;
210 uint64_t insns_flags2;
211 } DisasContext;
213 /* True when active word size < size of target_long. */
214 #ifdef TARGET_PPC64
215 # define NARROW_MODE(C) (!(C)->sf_mode)
216 #else
217 # define NARROW_MODE(C) 0
218 #endif
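/* On 64-bit targets NARROW_MODE is true when sf_mode is clear, i.e. the
 * CPU is executing in 32-bit mode, so results, carries and effective
 * addresses must be treated as 32-bit quantities; on 32-bit-only targets
 * it is constant 0. */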
220 struct opc_handler_t {
221 /* invalid bits for instruction 1 (Rc(opcode) == 0) */
222 uint32_t inval1;
223 /* invalid bits for instruction 2 (Rc(opcode) == 1) */
224 uint32_t inval2;
225 /* instruction type */
226 uint64_t type;
227 /* extended instruction type */
228 uint64_t type2;
229 /* handler */
230 void (*handler)(DisasContext *ctx);
231 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
232 const char *oname;
233 #endif
234 #if defined(DO_PPC_STATISTICS)
235 uint64_t count;
236 #endif
239 static inline void gen_reset_fpstatus(void)
241 gen_helper_reset_fpstatus(cpu_env);
244 static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
246 TCGv_i32 t0 = tcg_temp_new_i32();
248 if (set_fprf != 0) {
249 /* This case might be optimized later */
250 tcg_gen_movi_i32(t0, 1);
251 gen_helper_compute_fprf(t0, cpu_env, arg, t0);
252 if (unlikely(set_rc)) {
253 tcg_gen_mov_i32(cpu_crf[1], t0);
255 gen_helper_float_check_status(cpu_env);
256 } else if (unlikely(set_rc)) {
257 /* We always need to compute fpcc */
258 tcg_gen_movi_i32(t0, 0);
259 gen_helper_compute_fprf(t0, cpu_env, arg, t0);
260 tcg_gen_mov_i32(cpu_crf[1], t0);
263 tcg_temp_free_i32(t0);
266 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
268 if (ctx->access_type != access_type) {
269 tcg_gen_movi_i32(cpu_access_type, access_type);
270 ctx->access_type = access_type;
274 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
276 if (NARROW_MODE(ctx)) {
277 nip = (uint32_t)nip;
279 tcg_gen_movi_tl(cpu_nip, nip);
282 static inline void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
284 TCGv_i32 t0, t1;
285 if (ctx->exception == POWERPC_EXCP_NONE) {
286 gen_update_nip(ctx, ctx->nip);
288 t0 = tcg_const_i32(excp);
289 t1 = tcg_const_i32(error);
290 gen_helper_raise_exception_err(cpu_env, t0, t1);
291 tcg_temp_free_i32(t0);
292 tcg_temp_free_i32(t1);
293 ctx->exception = (excp);
296 static inline void gen_exception(DisasContext *ctx, uint32_t excp)
298 TCGv_i32 t0;
299 if (ctx->exception == POWERPC_EXCP_NONE) {
300 gen_update_nip(ctx, ctx->nip);
302 t0 = tcg_const_i32(excp);
303 gen_helper_raise_exception(cpu_env, t0);
304 tcg_temp_free_i32(t0);
305 ctx->exception = (excp);
308 static inline void gen_debug_exception(DisasContext *ctx)
310 TCGv_i32 t0;
312 if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
313 (ctx->exception != POWERPC_EXCP_SYNC)) {
314 gen_update_nip(ctx, ctx->nip);
316 t0 = tcg_const_i32(EXCP_DEBUG);
317 gen_helper_raise_exception(cpu_env, t0);
318 tcg_temp_free_i32(t0);
321 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
323 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
326 /* Stop translation */
327 static inline void gen_stop_exception(DisasContext *ctx)
329 gen_update_nip(ctx, ctx->nip);
330 ctx->exception = POWERPC_EXCP_STOP;
333 /* No need to update nip here, as execution flow will change */
334 static inline void gen_sync_exception(DisasContext *ctx)
336 ctx->exception = POWERPC_EXCP_SYNC;
339 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
340 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
342 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
343 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
345 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
346 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
348 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
349 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
351 typedef struct opcode_t {
352 unsigned char opc1, opc2, opc3;
353 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
354 unsigned char pad[5];
355 #else
356 unsigned char pad[1];
357 #endif
358 opc_handler_t handler;
359 const char *oname;
360 } opcode_t;
362 /*****************************************************************************/
363 /*** Instruction decoding ***/
364 #define EXTRACT_HELPER(name, shift, nb) \
365 static inline uint32_t name(uint32_t opcode) \
367 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
370 #define EXTRACT_SHELPER(name, shift, nb) \
371 static inline int32_t name(uint32_t opcode) \
373 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
376 #define EXTRACT_HELPER_SPLIT(name, shift1, nb1, shift2, nb2) \
377 static inline uint32_t name(uint32_t opcode) \
379 return (((opcode >> (shift1)) & ((1 << (nb1)) - 1)) << nb2) | \
380 ((opcode >> (shift2)) & ((1 << (nb2)) - 1)); \
382 /* Opcode part 1 */
383 EXTRACT_HELPER(opc1, 26, 6);
384 /* Opcode part 2 */
385 EXTRACT_HELPER(opc2, 1, 5);
386 /* Opcode part 3 */
387 EXTRACT_HELPER(opc3, 6, 5);
388 /* Update CR0 flags */
389 EXTRACT_HELPER(Rc, 0, 1);
390 /* Destination */
391 EXTRACT_HELPER(rD, 21, 5);
392 /* Source */
393 EXTRACT_HELPER(rS, 21, 5);
394 /* First operand */
395 EXTRACT_HELPER(rA, 16, 5);
396 /* Second operand */
397 EXTRACT_HELPER(rB, 11, 5);
398 /* Third operand */
399 EXTRACT_HELPER(rC, 6, 5);
400 /*** Get CRn ***/
401 EXTRACT_HELPER(crfD, 23, 3);
402 EXTRACT_HELPER(crfS, 18, 3);
403 EXTRACT_HELPER(crbD, 21, 5);
404 EXTRACT_HELPER(crbA, 16, 5);
405 EXTRACT_HELPER(crbB, 11, 5);
406 /* SPR / TBL */
407 EXTRACT_HELPER(_SPR, 11, 10);
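/* The two 5-bit halves of the architected SPR number are stored swapped in
 * the instruction's 10-bit SPR field; SPR() swaps them back.  For example a
 * field value of 0x100 decodes to SPR 8, the link register. */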
408 static inline uint32_t SPR(uint32_t opcode)
410 uint32_t sprn = _SPR(opcode);
412 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
414 /*** Get constants ***/
415 EXTRACT_HELPER(IMM, 12, 8);
416 /* 16 bits signed immediate value */
417 EXTRACT_SHELPER(SIMM, 0, 16);
418 /* 16 bits unsigned immediate value */
419 EXTRACT_HELPER(UIMM, 0, 16);
420 /* 5 bits signed immediate value */
421 EXTRACT_HELPER(SIMM5, 16, 5);
422 /* 5 bits unsigned immediate value */
423 EXTRACT_HELPER(UIMM5, 16, 5);
424 /* Bit count */
425 EXTRACT_HELPER(NB, 11, 5);
426 /* Shift count */
427 EXTRACT_HELPER(SH, 11, 5);
428 /* Vector shift count */
429 EXTRACT_HELPER(VSH, 6, 4);
430 /* Mask start */
431 EXTRACT_HELPER(MB, 6, 5);
432 /* Mask end */
433 EXTRACT_HELPER(ME, 1, 5);
434 /* Trap operand */
435 EXTRACT_HELPER(TO, 21, 5);
437 EXTRACT_HELPER(CRM, 12, 8);
438 EXTRACT_HELPER(SR, 16, 4);
440 /* mtfsf/mtfsfi */
441 EXTRACT_HELPER(FPBF, 23, 3);
442 EXTRACT_HELPER(FPIMM, 12, 4);
443 EXTRACT_HELPER(FPL, 25, 1);
444 EXTRACT_HELPER(FPFLM, 17, 8);
445 EXTRACT_HELPER(FPW, 16, 1);
447 /*** Jump target decoding ***/
448 /* Displacement */
449 EXTRACT_SHELPER(d, 0, 16);
450 /* Immediate address */
451 static inline target_ulong LI(uint32_t opcode)
453 return (opcode >> 0) & 0x03FFFFFC;
456 static inline uint32_t BD(uint32_t opcode)
458 return (opcode >> 0) & 0xFFFC;
461 EXTRACT_HELPER(BO, 21, 5);
462 EXTRACT_HELPER(BI, 16, 5);
463 /* Absolute/relative address */
464 EXTRACT_HELPER(AA, 1, 1);
465 /* Link */
466 EXTRACT_HELPER(LK, 0, 1);
468 /* Create a mask between <start> and <end> bits */
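/* Bits are numbered PowerPC-style, bit 0 being the most significant:
 * MASK(0, 31) on a 32-bit target is 0xFFFFFFFF and MASK(25, 31) is
 * 0x0000007F; when start > end the complement is returned, i.e. the mask
 * wraps around as the rotate-and-mask instructions require. */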
469 static inline target_ulong MASK(uint32_t start, uint32_t end)
471 target_ulong ret;
473 #if defined(TARGET_PPC64)
474 if (likely(start == 0)) {
475 ret = UINT64_MAX << (63 - end);
476 } else if (likely(end == 63)) {
477 ret = UINT64_MAX >> start;
479 #else
480 if (likely(start == 0)) {
481 ret = UINT32_MAX << (31 - end);
482 } else if (likely(end == 31)) {
483 ret = UINT32_MAX >> start;
485 #endif
486 else {
487 ret = (((target_ulong)(-1ULL)) >> (start)) ^
488 (((target_ulong)(-1ULL) >> (end)) >> 1);
489 if (unlikely(start > end))
490 return ~ret;
493 return ret;
496 EXTRACT_HELPER_SPLIT(xT, 0, 1, 21, 5);
497 EXTRACT_HELPER_SPLIT(xS, 0, 1, 21, 5);
498 EXTRACT_HELPER_SPLIT(xA, 2, 1, 16, 5);
499 EXTRACT_HELPER_SPLIT(xB, 1, 1, 11, 5);
500 EXTRACT_HELPER_SPLIT(xC, 3, 1, 6, 5);
501 EXTRACT_HELPER(DM, 8, 2);
502 EXTRACT_HELPER(UIM, 16, 2);
503 EXTRACT_HELPER(SHW, 8, 2);
504 /*****************************************************************************/
505 /* PowerPC instructions table */
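/* The GEN_OPCODE* macros below build opcode_t entries for the dispatch
 * table: opc1/opc2/opc3 index the entry, inval1/inval2 are the reserved-bit
 * masks checked at decode time, and handler is the translation callback.
 * The DO_PPC_STATISTICS variants also store the opcode name in the handler
 * so per-opcode execution counts can be reported. */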
507 #if defined(DO_PPC_STATISTICS)
508 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
510 .opc1 = op1, \
511 .opc2 = op2, \
512 .opc3 = op3, \
513 .pad = { 0, }, \
514 .handler = { \
515 .inval1 = invl, \
516 .type = _typ, \
517 .type2 = _typ2, \
518 .handler = &gen_##name, \
519 .oname = stringify(name), \
520 }, \
521 .oname = stringify(name), \
523 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
525 .opc1 = op1, \
526 .opc2 = op2, \
527 .opc3 = op3, \
528 .pad = { 0, }, \
529 .handler = { \
530 .inval1 = invl1, \
531 .inval2 = invl2, \
532 .type = _typ, \
533 .type2 = _typ2, \
534 .handler = &gen_##name, \
535 .oname = stringify(name), \
536 }, \
537 .oname = stringify(name), \
539 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
541 .opc1 = op1, \
542 .opc2 = op2, \
543 .opc3 = op3, \
544 .pad = { 0, }, \
545 .handler = { \
546 .inval1 = invl, \
547 .type = _typ, \
548 .type2 = _typ2, \
549 .handler = &gen_##name, \
550 .oname = onam, \
551 }, \
552 .oname = onam, \
554 #else
555 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
557 .opc1 = op1, \
558 .opc2 = op2, \
559 .opc3 = op3, \
560 .pad = { 0, }, \
561 .handler = { \
562 .inval1 = invl, \
563 .type = _typ, \
564 .type2 = _typ2, \
565 .handler = &gen_##name, \
566 }, \
567 .oname = stringify(name), \
569 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
571 .opc1 = op1, \
572 .opc2 = op2, \
573 .opc3 = op3, \
574 .pad = { 0, }, \
575 .handler = { \
576 .inval1 = invl1, \
577 .inval2 = invl2, \
578 .type = _typ, \
579 .type2 = _typ2, \
580 .handler = &gen_##name, \
581 }, \
582 .oname = stringify(name), \
584 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
586 .opc1 = op1, \
587 .opc2 = op2, \
588 .opc3 = op3, \
589 .pad = { 0, }, \
590 .handler = { \
591 .inval1 = invl, \
592 .type = _typ, \
593 .type2 = _typ2, \
594 .handler = &gen_##name, \
595 }, \
596 .oname = onam, \
598 #endif
600 /* SPR load/store helpers */
601 static inline void gen_load_spr(TCGv t, int reg)
603 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
606 static inline void gen_store_spr(int reg, TCGv t)
608 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
611 /* Invalid instruction */
612 static void gen_invalid(DisasContext *ctx)
614 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
617 static opc_handler_t invalid_handler = {
618 .inval1 = 0xFFFFFFFF,
619 .inval2 = 0xFFFFFFFF,
620 .type = PPC_NONE,
621 .type2 = PPC_NONE,
622 .handler = gen_invalid,
625 /*** Integer comparison ***/
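/* gen_op_cmp sets CR field crf from comparing arg0 with arg1: SO is copied
 * in from cpu_so, then the LT, GT and EQ bits are ORed in from signed or
 * unsigned setcond results.  gen_op_cmp32 sign- or zero-extends both
 * operands first so that only the low 32 bits take part. */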
627 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
629 TCGv t0 = tcg_temp_new();
630 TCGv_i32 t1 = tcg_temp_new_i32();
632 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
634 tcg_gen_setcond_tl((s ? TCG_COND_LT: TCG_COND_LTU), t0, arg0, arg1);
635 tcg_gen_trunc_tl_i32(t1, t0);
636 tcg_gen_shli_i32(t1, t1, CRF_LT);
637 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
639 tcg_gen_setcond_tl((s ? TCG_COND_GT: TCG_COND_GTU), t0, arg0, arg1);
640 tcg_gen_trunc_tl_i32(t1, t0);
641 tcg_gen_shli_i32(t1, t1, CRF_GT);
642 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
644 tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
645 tcg_gen_trunc_tl_i32(t1, t0);
646 tcg_gen_shli_i32(t1, t1, CRF_EQ);
647 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);
649 tcg_temp_free(t0);
650 tcg_temp_free_i32(t1);
653 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
655 TCGv t0 = tcg_const_tl(arg1);
656 gen_op_cmp(arg0, t0, s, crf);
657 tcg_temp_free(t0);
660 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
662 TCGv t0, t1;
663 t0 = tcg_temp_new();
664 t1 = tcg_temp_new();
665 if (s) {
666 tcg_gen_ext32s_tl(t0, arg0);
667 tcg_gen_ext32s_tl(t1, arg1);
668 } else {
669 tcg_gen_ext32u_tl(t0, arg0);
670 tcg_gen_ext32u_tl(t1, arg1);
672 gen_op_cmp(t0, t1, s, crf);
673 tcg_temp_free(t1);
674 tcg_temp_free(t0);
677 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
679 TCGv t0 = tcg_const_tl(arg1);
680 gen_op_cmp32(arg0, t0, s, crf);
681 tcg_temp_free(t0);
684 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
686 if (NARROW_MODE(ctx)) {
687 gen_op_cmpi32(reg, 0, 1, 0);
688 } else {
689 gen_op_cmpi(reg, 0, 1, 0);
693 /* cmp */
694 static void gen_cmp(DisasContext *ctx)
696 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
697 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
698 1, crfD(ctx->opcode));
699 } else {
700 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
701 1, crfD(ctx->opcode));
705 /* cmpi */
706 static void gen_cmpi(DisasContext *ctx)
708 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
709 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
710 1, crfD(ctx->opcode));
711 } else {
712 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
713 1, crfD(ctx->opcode));
717 /* cmpl */
718 static void gen_cmpl(DisasContext *ctx)
720 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
721 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
722 0, crfD(ctx->opcode));
723 } else {
724 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
725 0, crfD(ctx->opcode));
729 /* cmpli */
730 static void gen_cmpli(DisasContext *ctx)
732 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
733 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
734 0, crfD(ctx->opcode));
735 } else {
736 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
737 0, crfD(ctx->opcode));
741 /* isel (PowerPC 2.03 specification) */
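/* isel: rD = rA if the CR bit selected by the BC (rC) field is set,
 * otherwise rD = rB; an rA field of 0 selects the constant 0 rather than
 * r0.  Implemented as a conditional branch over the chosen CR field bit. */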
742 static void gen_isel(DisasContext *ctx)
744 int l1, l2;
745 uint32_t bi = rC(ctx->opcode);
746 uint32_t mask;
747 TCGv_i32 t0;
749 l1 = gen_new_label();
750 l2 = gen_new_label();
752 mask = 1 << (3 - (bi & 0x03));
753 t0 = tcg_temp_new_i32();
754 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
755 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
756 if (rA(ctx->opcode) == 0)
757 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
758 else
759 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
760 tcg_gen_br(l2);
761 gen_set_label(l1);
762 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
763 gen_set_label(l2);
764 tcg_temp_free_i32(t0);
767 /* cmpb: PowerPC 2.05 specification */
768 static void gen_cmpb(DisasContext *ctx)
770 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
771 cpu_gpr[rB(ctx->opcode)]);
774 /*** Integer arithmetic ***/
776 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
777 TCGv arg1, TCGv arg2, int sub)
779 TCGv t0 = tcg_temp_new();
781 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
782 tcg_gen_xor_tl(t0, arg1, arg2);
783 if (sub) {
784 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
785 } else {
786 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
788 tcg_temp_free(t0);
789 if (NARROW_MODE(ctx)) {
790 tcg_gen_ext32s_tl(cpu_ov, cpu_ov);
792 tcg_gen_shri_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1);
793 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
796 /* Common add function */
797 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
798 TCGv arg2, bool add_ca, bool compute_ca,
799 bool compute_ov, bool compute_rc0)
801 TCGv t0 = ret;
803 if (compute_ca || compute_ov) {
804 t0 = tcg_temp_new();
807 if (compute_ca) {
808 if (NARROW_MODE(ctx)) {
809 /* Caution: a non-obvious corner case of the spec is that we
810 must produce the *entire* 64-bit addition, but produce the
811 carry into bit 32. */
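/* The trick used below: XORing the full sum with (arg1 ^ arg2) yields the
   per-bit carry-in values, so bit 32 of that XOR is the carry out of the
   low 32-bit addition. */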
812 TCGv t1 = tcg_temp_new();
813 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
814 tcg_gen_add_tl(t0, arg1, arg2);
815 if (add_ca) {
816 tcg_gen_add_tl(t0, t0, cpu_ca);
818 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
819 tcg_temp_free(t1);
820 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
821 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
822 } else {
823 TCGv zero = tcg_const_tl(0);
824 if (add_ca) {
825 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero);
826 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero);
827 } else {
828 tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero);
830 tcg_temp_free(zero);
832 } else {
833 tcg_gen_add_tl(t0, arg1, arg2);
834 if (add_ca) {
835 tcg_gen_add_tl(t0, t0, cpu_ca);
839 if (compute_ov) {
840 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
842 if (unlikely(compute_rc0)) {
843 gen_set_Rc0(ctx, t0);
846 if (!TCGV_EQUAL(t0, ret)) {
847 tcg_gen_mov_tl(ret, t0);
848 tcg_temp_free(t0);
851 /* Add functions with two operands */
852 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
853 static void glue(gen_, name)(DisasContext *ctx) \
855 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
856 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
857 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
859 /* Add functions with one operand and one immediate */
860 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
861 add_ca, compute_ca, compute_ov) \
862 static void glue(gen_, name)(DisasContext *ctx) \
864 TCGv t0 = tcg_const_tl(const_val); \
865 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
866 cpu_gpr[rA(ctx->opcode)], t0, \
867 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
868 tcg_temp_free(t0); \
871 /* add add. addo addo. */
872 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
873 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
874 /* addc addc. addco addco. */
875 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
876 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
877 /* adde adde. addeo addeo. */
878 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
879 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
880 /* addme addme. addmeo addmeo. */
881 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
882 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
883 /* addze addze. addzeo addzeo. */
884 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
885 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
886 /* addi */
887 static void gen_addi(DisasContext *ctx)
889 target_long simm = SIMM(ctx->opcode);
891 if (rA(ctx->opcode) == 0) {
892 /* li case */
893 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
894 } else {
895 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
896 cpu_gpr[rA(ctx->opcode)], simm);
899 /* addic addic. */
900 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
902 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
903 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
904 c, 0, 1, 0, compute_rc0);
905 tcg_temp_free(c);
908 static void gen_addic(DisasContext *ctx)
910 gen_op_addic(ctx, 0);
913 static void gen_addic_(DisasContext *ctx)
915 gen_op_addic(ctx, 1);
918 /* addis */
919 static void gen_addis(DisasContext *ctx)
921 target_long simm = SIMM(ctx->opcode);
923 if (rA(ctx->opcode) == 0) {
924 /* lis case */
925 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
926 } else {
927 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
928 cpu_gpr[rA(ctx->opcode)], simm << 16);
932 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
933 TCGv arg2, int sign, int compute_ov)
935 int l1 = gen_new_label();
936 int l2 = gen_new_label();
937 TCGv_i32 t0 = tcg_temp_local_new_i32();
938 TCGv_i32 t1 = tcg_temp_local_new_i32();
940 tcg_gen_trunc_tl_i32(t0, arg1);
941 tcg_gen_trunc_tl_i32(t1, arg2);
942 tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
943 if (sign) {
944 int l3 = gen_new_label();
945 tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
946 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
947 gen_set_label(l3);
948 tcg_gen_div_i32(t0, t0, t1);
949 } else {
950 tcg_gen_divu_i32(t0, t0, t1);
952 if (compute_ov) {
953 tcg_gen_movi_tl(cpu_ov, 0);
955 tcg_gen_br(l2);
956 gen_set_label(l1);
957 if (sign) {
958 tcg_gen_sari_i32(t0, t0, 31);
959 } else {
960 tcg_gen_movi_i32(t0, 0);
962 if (compute_ov) {
963 tcg_gen_movi_tl(cpu_ov, 1);
964 tcg_gen_movi_tl(cpu_so, 1);
966 gen_set_label(l2);
967 tcg_gen_extu_i32_tl(ret, t0);
968 tcg_temp_free_i32(t0);
969 tcg_temp_free_i32(t1);
970 if (unlikely(Rc(ctx->opcode) != 0))
971 gen_set_Rc0(ctx, ret);
973 /* Div functions */
974 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
975 static void glue(gen_, name)(DisasContext *ctx) \
977 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
978 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
979 sign, compute_ov); \
981 /* divwu divwu. divwuo divwuo. */
982 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
983 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
984 /* divw divw. divwo divwo. */
985 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
986 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
988 /* div[wd]eu[o][.] */
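/* GEN_DIVE generates the handler body for the extended-divide instructions:
 * it calls the named helper with a flag telling the helper whether to
 * update XER OV/SO (the 'o' forms), then sets CR0 from the result when Rc
 * is set. */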
989 #define GEN_DIVE(name, hlpr, compute_ov) \
990 static void gen_##name(DisasContext *ctx) \
992 TCGv_i32 t0 = tcg_const_i32(compute_ov); \
993 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
994 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
995 tcg_temp_free_i32(t0); \
996 if (unlikely(Rc(ctx->opcode) != 0)) { \
997 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1001 #if defined(TARGET_PPC64)
1002 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1003 TCGv arg2, int sign, int compute_ov)
1005 int l1 = gen_new_label();
1006 int l2 = gen_new_label();
1008 tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
1009 if (sign) {
1010 int l3 = gen_new_label();
1011 tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
1012 tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
1013 gen_set_label(l3);
1014 tcg_gen_div_i64(ret, arg1, arg2);
1015 } else {
1016 tcg_gen_divu_i64(ret, arg1, arg2);
1018 if (compute_ov) {
1019 tcg_gen_movi_tl(cpu_ov, 0);
1021 tcg_gen_br(l2);
1022 gen_set_label(l1);
1023 if (sign) {
1024 tcg_gen_sari_i64(ret, arg1, 63);
1025 } else {
1026 tcg_gen_movi_i64(ret, 0);
1028 if (compute_ov) {
1029 tcg_gen_movi_tl(cpu_ov, 1);
1030 tcg_gen_movi_tl(cpu_so, 1);
1032 gen_set_label(l2);
1033 if (unlikely(Rc(ctx->opcode) != 0))
1034 gen_set_Rc0(ctx, ret);
1036 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1037 static void glue(gen_, name)(DisasContext *ctx) \
1039 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1040 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1041 sign, compute_ov); \
1043 /* divdu divdu. divduo divduo. */
1044 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1045 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1046 /* divd divd. divdo divdo. */
1047 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1048 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
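/* divdeu[o][.] (Divide Doubleword Extended Unsigned, ISA 2.06): the divdeu
 * helper divides the 128-bit value (RA || 64 zero bits) by RB; per the ISA,
 * if the quotient cannot be represented in 64 bits or RB is zero the result
 * is undefined, and the 'o' forms request OV/SO updates from the helper. */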
1050 GEN_DIVE(divdeu, divdeu, 0);
1051 GEN_DIVE(divdeuo, divdeu, 1);
1053 #endif
1055 /* mulhw mulhw. */
1056 static void gen_mulhw(DisasContext *ctx)
1058 TCGv_i32 t0 = tcg_temp_new_i32();
1059 TCGv_i32 t1 = tcg_temp_new_i32();
1061 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1062 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1063 tcg_gen_muls2_i32(t0, t1, t0, t1);
1064 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1065 tcg_temp_free_i32(t0);
1066 tcg_temp_free_i32(t1);
1067 if (unlikely(Rc(ctx->opcode) != 0))
1068 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1071 /* mulhwu mulhwu. */
1072 static void gen_mulhwu(DisasContext *ctx)
1074 TCGv_i32 t0 = tcg_temp_new_i32();
1075 TCGv_i32 t1 = tcg_temp_new_i32();
1077 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1078 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1079 tcg_gen_mulu2_i32(t0, t1, t0, t1);
1080 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1081 tcg_temp_free_i32(t0);
1082 tcg_temp_free_i32(t1);
1083 if (unlikely(Rc(ctx->opcode) != 0))
1084 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1087 /* mullw mullw. */
1088 static void gen_mullw(DisasContext *ctx)
1090 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1091 cpu_gpr[rB(ctx->opcode)]);
1092 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1093 if (unlikely(Rc(ctx->opcode) != 0))
1094 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1097 /* mullwo mullwo. */
1098 static void gen_mullwo(DisasContext *ctx)
1100 TCGv_i32 t0 = tcg_temp_new_i32();
1101 TCGv_i32 t1 = tcg_temp_new_i32();
1103 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1104 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1105 tcg_gen_muls2_i32(t0, t1, t0, t1);
1106 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
1108 tcg_gen_sari_i32(t0, t0, 31);
1109 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
1110 tcg_gen_extu_i32_tl(cpu_ov, t0);
1111 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1113 tcg_temp_free_i32(t0);
1114 tcg_temp_free_i32(t1);
1115 if (unlikely(Rc(ctx->opcode) != 0))
1116 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1119 /* mulli */
1120 static void gen_mulli(DisasContext *ctx)
1122 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1123 SIMM(ctx->opcode));
1126 #if defined(TARGET_PPC64)
1127 /* mulhd mulhd. */
1128 static void gen_mulhd(DisasContext *ctx)
1130 TCGv lo = tcg_temp_new();
1131 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1132 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1133 tcg_temp_free(lo);
1134 if (unlikely(Rc(ctx->opcode) != 0)) {
1135 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1139 /* mulhdu mulhdu. */
1140 static void gen_mulhdu(DisasContext *ctx)
1142 TCGv lo = tcg_temp_new();
1143 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1144 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1145 tcg_temp_free(lo);
1146 if (unlikely(Rc(ctx->opcode) != 0)) {
1147 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1151 /* mulld mulld. */
1152 static void gen_mulld(DisasContext *ctx)
1154 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1155 cpu_gpr[rB(ctx->opcode)]);
1156 if (unlikely(Rc(ctx->opcode) != 0))
1157 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1160 /* mulldo mulldo. */
1161 static void gen_mulldo(DisasContext *ctx)
1163 gen_helper_mulldo(cpu_gpr[rD(ctx->opcode)], cpu_env,
1164 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1165 if (unlikely(Rc(ctx->opcode) != 0)) {
1166 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1169 #endif
1171 /* Common subf function */
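/* Computes ret = arg2 - arg1, architecturally ~arg1 + arg2 + 1 (or + CA in
 * the extended forms), optionally tracking CA, OV/SO and CR0. */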
1172 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1173 TCGv arg2, bool add_ca, bool compute_ca,
1174 bool compute_ov, bool compute_rc0)
1176 TCGv t0 = ret;
1178 if (compute_ca || compute_ov) {
1179 t0 = tcg_temp_new();
1182 if (compute_ca) {
1183 /* dest = ~arg1 + arg2 [+ ca]. */
1184 if (NARROW_MODE(ctx)) {
1185 /* Caution: a non-obvious corner case of the spec is that we
1186 must produce the *entire* 64-bit addition, but produce the
1187 carry into bit 32. */
1188 TCGv inv1 = tcg_temp_new();
1189 TCGv t1 = tcg_temp_new();
1190 tcg_gen_not_tl(inv1, arg1);
1191 if (add_ca) {
1192 tcg_gen_add_tl(t0, arg2, cpu_ca);
1193 } else {
1194 tcg_gen_addi_tl(t0, arg2, 1);
1196 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
1197 tcg_gen_add_tl(t0, t0, inv1);
1198 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
1199 tcg_temp_free(t1);
1200 tcg_gen_shri_tl(cpu_ca, cpu_ca, 32); /* extract bit 32 */
1201 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
1202 } else if (add_ca) {
1203 TCGv zero, inv1 = tcg_temp_new();
1204 tcg_gen_not_tl(inv1, arg1);
1205 zero = tcg_const_tl(0);
1206 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
1207 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
1208 tcg_temp_free(zero);
1209 tcg_temp_free(inv1);
1210 } else {
1211 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1212 tcg_gen_sub_tl(t0, arg2, arg1);
1214 } else if (add_ca) {
1215 /* Since we're ignoring carry-out, we can simplify the
1216 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. */
1217 tcg_gen_sub_tl(t0, arg2, arg1);
1218 tcg_gen_add_tl(t0, t0, cpu_ca);
1219 tcg_gen_subi_tl(t0, t0, 1);
1220 } else {
1221 tcg_gen_sub_tl(t0, arg2, arg1);
1224 if (compute_ov) {
1225 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1227 if (unlikely(compute_rc0)) {
1228 gen_set_Rc0(ctx, t0);
1231 if (!TCGV_EQUAL(t0, ret)) {
1232 tcg_gen_mov_tl(ret, t0);
1233 tcg_temp_free(t0);
1236 /* Sub functions with two operands */
1237 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1238 static void glue(gen_, name)(DisasContext *ctx) \
1240 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1241 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1242 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1244 /* Sub functions with one operand and one immediate */
1245 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1246 add_ca, compute_ca, compute_ov) \
1247 static void glue(gen_, name)(DisasContext *ctx) \
1249 TCGv t0 = tcg_const_tl(const_val); \
1250 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1251 cpu_gpr[rA(ctx->opcode)], t0, \
1252 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1253 tcg_temp_free(t0); \
1255 /* subf subf. subfo subfo. */
1256 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
1257 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
1258 /* subfc subfc. subfco subfco. */
1259 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
1260 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
1261 /* subfe subfe. subfeo subfeo. */
1262 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
1263 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
1264 /* subfme subfme. subfmeo subfmeo. */
1265 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
1266 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
1267 /* subfze subfze. subfzeo subfzeo. */
1268 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
1269 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
1271 /* subfic */
1272 static void gen_subfic(DisasContext *ctx)
1274 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1275 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1276 c, 0, 1, 0, 0);
1277 tcg_temp_free(c);
1280 /* neg neg. nego nego. */
1281 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
1283 TCGv zero = tcg_const_tl(0);
1284 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1285 zero, 0, 0, compute_ov, Rc(ctx->opcode));
1286 tcg_temp_free(zero);
1289 static void gen_neg(DisasContext *ctx)
1291 gen_op_arith_neg(ctx, 0);
1294 static void gen_nego(DisasContext *ctx)
1296 gen_op_arith_neg(ctx, 1);
1299 /*** Integer logical ***/
1300 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1301 static void glue(gen_, name)(DisasContext *ctx) \
1303 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1304 cpu_gpr[rB(ctx->opcode)]); \
1305 if (unlikely(Rc(ctx->opcode) != 0)) \
1306 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1309 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1310 static void glue(gen_, name)(DisasContext *ctx) \
1312 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1313 if (unlikely(Rc(ctx->opcode) != 0)) \
1314 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1317 /* and & and. */
1318 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
1319 /* andc & andc. */
1320 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
1322 /* andi. */
1323 static void gen_andi_(DisasContext *ctx)
1325 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
1326 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1329 /* andis. */
1330 static void gen_andis_(DisasContext *ctx)
1332 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
1333 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1336 /* cntlzw */
1337 static void gen_cntlzw(DisasContext *ctx)
1339 gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1340 if (unlikely(Rc(ctx->opcode) != 0))
1341 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1343 /* eqv & eqv. */
1344 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
1345 /* extsb & extsb. */
1346 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
1347 /* extsh & extsh. */
1348 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
1349 /* nand & nand. */
1350 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
1351 /* nor & nor. */
1352 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
1354 /* or & or. */
1355 static void gen_or(DisasContext *ctx)
1357 int rs, ra, rb;
1359 rs = rS(ctx->opcode);
1360 ra = rA(ctx->opcode);
1361 rb = rB(ctx->opcode);
1362 /* Optimisation for mr. ri case */
1363 if (rs != ra || rs != rb) {
1364 if (rs != rb)
1365 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
1366 else
1367 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
1368 if (unlikely(Rc(ctx->opcode) != 0))
1369 gen_set_Rc0(ctx, cpu_gpr[ra]);
1370 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1371 gen_set_Rc0(ctx, cpu_gpr[rs]);
1372 #if defined(TARGET_PPC64)
1373 } else {
1374 int prio = 0;
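/* "or rx,rx,rx" forms (rS == rA == rB) act as thread-priority hint nops:
   the register number selects a priority level that is written to the PRI
   field of SPR_PPR (bits 50..52 of the value built below). */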
1376 switch (rs) {
1377 case 1:
1378 /* Set process priority to low */
1379 prio = 2;
1380 break;
1381 case 6:
1382 /* Set process priority to medium-low */
1383 prio = 3;
1384 break;
1385 case 2:
1386 /* Set process priority to normal */
1387 prio = 4;
1388 break;
1389 #if !defined(CONFIG_USER_ONLY)
1390 case 31:
1391 if (ctx->mem_idx > 0) {
1392 /* Set process priority to very low */
1393 prio = 1;
1395 break;
1396 case 5:
1397 if (ctx->mem_idx > 0) {
1398 /* Set process priority to medium-high */
1399 prio = 5;
1401 break;
1402 case 3:
1403 if (ctx->mem_idx > 0) {
1404 /* Set process priority to high */
1405 prio = 6;
1407 break;
1408 case 7:
1409 if (ctx->mem_idx > 1) {
1410 /* Set process priority to very high */
1411 prio = 7;
1413 break;
1414 #endif
1415 default:
1416 /* nop */
1417 break;
1419 if (prio) {
1420 TCGv t0 = tcg_temp_new();
1421 gen_load_spr(t0, SPR_PPR);
1422 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
1423 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
1424 gen_store_spr(SPR_PPR, t0);
1425 tcg_temp_free(t0);
1427 #endif
1430 /* orc & orc. */
1431 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1433 /* xor & xor. */
1434 static void gen_xor(DisasContext *ctx)
1436 /* Optimisation for "set to zero" case */
1437 if (rS(ctx->opcode) != rB(ctx->opcode))
1438 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1439 else
1440 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1441 if (unlikely(Rc(ctx->opcode) != 0))
1442 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1445 /* ori */
1446 static void gen_ori(DisasContext *ctx)
1448 target_ulong uimm = UIMM(ctx->opcode);
1450 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1451 /* NOP */
1452 /* XXX: should handle special NOPs for POWER series */
1453 return;
1455 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1458 /* oris */
1459 static void gen_oris(DisasContext *ctx)
1461 target_ulong uimm = UIMM(ctx->opcode);
1463 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1464 /* NOP */
1465 return;
1467 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1470 /* xori */
1471 static void gen_xori(DisasContext *ctx)
1473 target_ulong uimm = UIMM(ctx->opcode);
1475 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1476 /* NOP */
1477 return;
1479 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1482 /* xoris */
1483 static void gen_xoris(DisasContext *ctx)
1485 target_ulong uimm = UIMM(ctx->opcode);
1487 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1488 /* NOP */
1489 return;
1491 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1494 /* popcntb : PowerPC 2.03 specification */
1495 static void gen_popcntb(DisasContext *ctx)
1497 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1500 static void gen_popcntw(DisasContext *ctx)
1502 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1505 #if defined(TARGET_PPC64)
1506 /* popcntd: PowerPC 2.06 specification */
1507 static void gen_popcntd(DisasContext *ctx)
1509 gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1511 #endif
1513 /* prtyw: PowerPC 2.05 specification */
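/* prtyw: two shift/XOR folds accumulate the least-significant bit of each
 * byte into bit 0 of its 32-bit word, and the final mask keeps only that
 * bit per word, i.e. the parity of the low-order bits of the word's four
 * bytes. */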
1514 static void gen_prtyw(DisasContext *ctx)
1516 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1517 TCGv rs = cpu_gpr[rS(ctx->opcode)];
1518 TCGv t0 = tcg_temp_new();
1519 tcg_gen_shri_tl(t0, rs, 16);
1520 tcg_gen_xor_tl(ra, rs, t0);
1521 tcg_gen_shri_tl(t0, ra, 8);
1522 tcg_gen_xor_tl(ra, ra, t0);
1523 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
1524 tcg_temp_free(t0);
1527 #if defined(TARGET_PPC64)
1528 /* prtyd: PowerPC 2.05 specification */
1529 static void gen_prtyd(DisasContext *ctx)
1531 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1532 TCGv rs = cpu_gpr[rS(ctx->opcode)];
1533 TCGv t0 = tcg_temp_new();
1534 tcg_gen_shri_tl(t0, rs, 32);
1535 tcg_gen_xor_tl(ra, rs, t0);
1536 tcg_gen_shri_tl(t0, ra, 16);
1537 tcg_gen_xor_tl(ra, ra, t0);
1538 tcg_gen_shri_tl(t0, ra, 8);
1539 tcg_gen_xor_tl(ra, ra, t0);
1540 tcg_gen_andi_tl(ra, ra, 1);
1541 tcg_temp_free(t0);
1543 #endif
1545 #if defined(TARGET_PPC64)
1546 /* bpermd */
1547 static void gen_bpermd(DisasContext *ctx)
1549 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
1550 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1552 #endif
1554 #if defined(TARGET_PPC64)
1555 /* extsw & extsw. */
1556 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
1558 /* cntlzd */
1559 static void gen_cntlzd(DisasContext *ctx)
1561 gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1562 if (unlikely(Rc(ctx->opcode) != 0))
1563 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1565 #endif
1567 /*** Integer rotate ***/
1569 /* rlwimi & rlwimi. */
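/* rlwimi: rotate the low 32 bits of rS left by SH, then insert the result
 * into rA under MASK(MB, ME), leaving the remaining rA bits untouched.  On
 * 64-bit targets MB/ME are biased by 32 because MASK() numbers bits across
 * the full 64-bit register. */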
1570 static void gen_rlwimi(DisasContext *ctx)
1572 uint32_t mb, me, sh;
1574 mb = MB(ctx->opcode);
1575 me = ME(ctx->opcode);
1576 sh = SH(ctx->opcode);
1577 if (likely(sh == 0 && mb == 0 && me == 31)) {
1578 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1579 } else {
1580 target_ulong mask;
1581 TCGv t1;
1582 TCGv t0 = tcg_temp_new();
1583 #if defined(TARGET_PPC64)
1584 TCGv_i32 t2 = tcg_temp_new_i32();
1585 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
1586 tcg_gen_rotli_i32(t2, t2, sh);
1587 tcg_gen_extu_i32_i64(t0, t2);
1588 tcg_temp_free_i32(t2);
1589 #else
1590 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1591 #endif
1592 #if defined(TARGET_PPC64)
1593 mb += 32;
1594 me += 32;
1595 #endif
1596 mask = MASK(mb, me);
1597 t1 = tcg_temp_new();
1598 tcg_gen_andi_tl(t0, t0, mask);
1599 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1600 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1601 tcg_temp_free(t0);
1602 tcg_temp_free(t1);
1604 if (unlikely(Rc(ctx->opcode) != 0))
1605 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1608 /* rlwinm & rlwinm. */
1609 static void gen_rlwinm(DisasContext *ctx)
1611 uint32_t mb, me, sh;
1613 sh = SH(ctx->opcode);
1614 mb = MB(ctx->opcode);
1615 me = ME(ctx->opcode);
1617 if (likely(mb == 0 && me == (31 - sh))) {
1618 if (likely(sh == 0)) {
1619 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1620 } else {
1621 TCGv t0 = tcg_temp_new();
1622 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1623 tcg_gen_shli_tl(t0, t0, sh);
1624 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1625 tcg_temp_free(t0);
1627 } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
1628 TCGv t0 = tcg_temp_new();
1629 tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
1630 tcg_gen_shri_tl(t0, t0, mb);
1631 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
1632 tcg_temp_free(t0);
1633 } else {
1634 TCGv t0 = tcg_temp_new();
1635 #if defined(TARGET_PPC64)
1636 TCGv_i32 t1 = tcg_temp_new_i32();
1637 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1638 tcg_gen_rotli_i32(t1, t1, sh);
1639 tcg_gen_extu_i32_i64(t0, t1);
1640 tcg_temp_free_i32(t1);
1641 #else
1642 tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
1643 #endif
1644 #if defined(TARGET_PPC64)
1645 mb += 32;
1646 me += 32;
1647 #endif
1648 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1649 tcg_temp_free(t0);
1651 if (unlikely(Rc(ctx->opcode) != 0))
1652 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1655 /* rlwnm & rlwnm. */
1656 static void gen_rlwnm(DisasContext *ctx)
1658 uint32_t mb, me;
1659 TCGv t0;
1660 #if defined(TARGET_PPC64)
1661 TCGv_i32 t1, t2;
1662 #endif
1664 mb = MB(ctx->opcode);
1665 me = ME(ctx->opcode);
1666 t0 = tcg_temp_new();
1667 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
1668 #if defined(TARGET_PPC64)
1669 t1 = tcg_temp_new_i32();
1670 t2 = tcg_temp_new_i32();
1671 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
1672 tcg_gen_trunc_i64_i32(t2, t0);
1673 tcg_gen_rotl_i32(t1, t1, t2);
1674 tcg_gen_extu_i32_i64(t0, t1);
1675 tcg_temp_free_i32(t1);
1676 tcg_temp_free_i32(t2);
1677 #else
1678 tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
1679 #endif
1680 if (unlikely(mb != 0 || me != 31)) {
1681 #if defined(TARGET_PPC64)
1682 mb += 32;
1683 me += 32;
1684 #endif
1685 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1686 } else {
1687 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1689 tcg_temp_free(t0);
1690 if (unlikely(Rc(ctx->opcode) != 0))
1691 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1694 #if defined(TARGET_PPC64)
1695 #define GEN_PPC64_R2(name, opc1, opc2) \
1696 static void glue(gen_, name##0)(DisasContext *ctx) \
1698 gen_##name(ctx, 0); \
1701 static void glue(gen_, name##1)(DisasContext *ctx) \
1703 gen_##name(ctx, 1); \
1705 #define GEN_PPC64_R4(name, opc1, opc2) \
1706 static void glue(gen_, name##0)(DisasContext *ctx) \
1708 gen_##name(ctx, 0, 0); \
1711 static void glue(gen_, name##1)(DisasContext *ctx) \
1713 gen_##name(ctx, 0, 1); \
1716 static void glue(gen_, name##2)(DisasContext *ctx) \
1718 gen_##name(ctx, 1, 0); \
1721 static void glue(gen_, name##3)(DisasContext *ctx) \
1723 gen_##name(ctx, 1, 1); \
1726 static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
1727 uint32_t sh)
1729 if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
1730 tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
1731 } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
1732 tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
1733 } else {
1734 TCGv t0 = tcg_temp_new();
1735 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1736 if (likely(mb == 0 && me == 63)) {
1737 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1738 } else {
1739 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1741 tcg_temp_free(t0);
1743 if (unlikely(Rc(ctx->opcode) != 0))
1744 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1746 /* rldicl - rldicl. */
1747 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
1749 uint32_t sh, mb;
1751 sh = SH(ctx->opcode) | (shn << 5);
1752 mb = MB(ctx->opcode) | (mbn << 5);
1753 gen_rldinm(ctx, mb, 63, sh);
1755 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1756 /* rldicr - rldicr. */
1757 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
1759 uint32_t sh, me;
1761 sh = SH(ctx->opcode) | (shn << 5);
1762 me = MB(ctx->opcode) | (men << 5);
1763 gen_rldinm(ctx, 0, me, sh);
1765 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1766 /* rldic - rldic. */
1767 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
1769 uint32_t sh, mb;
1771 sh = SH(ctx->opcode) | (shn << 5);
1772 mb = MB(ctx->opcode) | (mbn << 5);
1773 gen_rldinm(ctx, mb, 63 - sh, sh);
1775 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1777 static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
1779 TCGv t0;
1781 t0 = tcg_temp_new();
1782 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1783 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1784 if (unlikely(mb != 0 || me != 63)) {
1785 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1786 } else {
1787 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1789 tcg_temp_free(t0);
1790 if (unlikely(Rc(ctx->opcode) != 0))
1791 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1794 /* rldcl - rldcl. */
1795 static inline void gen_rldcl(DisasContext *ctx, int mbn)
1797 uint32_t mb;
1799 mb = MB(ctx->opcode) | (mbn << 5);
1800 gen_rldnm(ctx, mb, 63);
1802 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1803 /* rldcr - rldcr. */
1804 static inline void gen_rldcr(DisasContext *ctx, int men)
1806 uint32_t me;
1808 me = MB(ctx->opcode) | (men << 5);
1809 gen_rldnm(ctx, 0, me);
1811 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1812 /* rldimi - rldimi. */
1813 static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
1815 uint32_t sh, mb, me;
1817 sh = SH(ctx->opcode) | (shn << 5);
1818 mb = MB(ctx->opcode) | (mbn << 5);
1819 me = 63 - sh;
1820 if (unlikely(sh == 0 && mb == 0)) {
1821 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
1822 } else {
1823 TCGv t0, t1;
1824 target_ulong mask;
1826 t0 = tcg_temp_new();
1827 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
1828 t1 = tcg_temp_new();
1829 mask = MASK(mb, me);
1830 tcg_gen_andi_tl(t0, t0, mask);
1831 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
1832 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1833 tcg_temp_free(t0);
1834 tcg_temp_free(t1);
1836 if (unlikely(Rc(ctx->opcode) != 0))
1837 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1839 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1840 #endif
1842 /*** Integer shift ***/
1844 /* slw & slw. */
1845 static void gen_slw(DisasContext *ctx)
1847 TCGv t0, t1;
1849 t0 = tcg_temp_new();
1850 /* AND rS with a mask that is 0 when rB >= 0x20 */
1851 #if defined(TARGET_PPC64)
1852 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1853 tcg_gen_sari_tl(t0, t0, 0x3f);
1854 #else
1855 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1856 tcg_gen_sari_tl(t0, t0, 0x1f);
1857 #endif
1858 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1859 t1 = tcg_temp_new();
1860 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1861 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1862 tcg_temp_free(t1);
1863 tcg_temp_free(t0);
1864 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
1865 if (unlikely(Rc(ctx->opcode) != 0))
1866 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1869 /* sraw & sraw. */
1870 static void gen_sraw(DisasContext *ctx)
1872 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
1873 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1874 if (unlikely(Rc(ctx->opcode) != 0))
1875 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1878 /* srawi & srawi. */
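/* srawi: CA is set only when the (sign-extended) source word is negative
 * and at least one 1 bit is shifted out, which is what lets srawi + addze
 * implement a signed divide by 2^SH that rounds toward zero. */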
1879 static void gen_srawi(DisasContext *ctx)
1881 int sh = SH(ctx->opcode);
1882 TCGv dst = cpu_gpr[rA(ctx->opcode)];
1883 TCGv src = cpu_gpr[rS(ctx->opcode)];
1884 if (sh == 0) {
1885 tcg_gen_mov_tl(dst, src);
1886 tcg_gen_movi_tl(cpu_ca, 0);
1887 } else {
1888 TCGv t0;
1889 tcg_gen_ext32s_tl(dst, src);
1890 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
1891 t0 = tcg_temp_new();
1892 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
1893 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
1894 tcg_temp_free(t0);
1895 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
1896 tcg_gen_sari_tl(dst, dst, sh);
1898 if (unlikely(Rc(ctx->opcode) != 0)) {
1899 gen_set_Rc0(ctx, dst);
1903 /* srw & srw. */
1904 static void gen_srw(DisasContext *ctx)
1906 TCGv t0, t1;
1908 t0 = tcg_temp_new();
1909 /* AND rS with a mask that is 0 when rB >= 0x20 */
1910 #if defined(TARGET_PPC64)
1911 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
1912 tcg_gen_sari_tl(t0, t0, 0x3f);
1913 #else
1914 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
1915 tcg_gen_sari_tl(t0, t0, 0x1f);
1916 #endif
1917 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1918 tcg_gen_ext32u_tl(t0, t0);
1919 t1 = tcg_temp_new();
1920 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
1921 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1922 tcg_temp_free(t1);
1923 tcg_temp_free(t0);
1924 if (unlikely(Rc(ctx->opcode) != 0))
1925 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1928 #if defined(TARGET_PPC64)
1929 /* sld & sld. */
1930 static void gen_sld(DisasContext *ctx)
1932 TCGv t0, t1;
1934 t0 = tcg_temp_new();
1935 /* AND rS with a mask that is 0 when rB >= 0x40 */
1936 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1937 tcg_gen_sari_tl(t0, t0, 0x3f);
1938 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1939 t1 = tcg_temp_new();
1940 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
1941 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
1942 tcg_temp_free(t1);
1943 tcg_temp_free(t0);
1944 if (unlikely(Rc(ctx->opcode) != 0))
1945 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1948 /* srad & srad. */
1949 static void gen_srad(DisasContext *ctx)
1951 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
1952 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1953 if (unlikely(Rc(ctx->opcode) != 0))
1954 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1956 /* sradi & sradi. */
1957 static inline void gen_sradi(DisasContext *ctx, int n)
1959 int sh = SH(ctx->opcode) + (n << 5);
1960 TCGv dst = cpu_gpr[rA(ctx->opcode)];
1961 TCGv src = cpu_gpr[rS(ctx->opcode)];
1962 if (sh == 0) {
1963 tcg_gen_mov_tl(dst, src);
1964 tcg_gen_movi_tl(cpu_ca, 0);
1965 } else {
1966 TCGv t0;
1967 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
1968 t0 = tcg_temp_new();
1969 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
1970 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
1971 tcg_temp_free(t0);
1972 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
1973 tcg_gen_sari_tl(dst, src, sh);
1975 if (unlikely(Rc(ctx->opcode) != 0)) {
1976 gen_set_Rc0(ctx, dst);
1980 static void gen_sradi0(DisasContext *ctx)
1982 gen_sradi(ctx, 0);
1985 static void gen_sradi1(DisasContext *ctx)
1987 gen_sradi(ctx, 1);
1990 /* srd & srd. */
1991 static void gen_srd(DisasContext *ctx)
1993 TCGv t0, t1;
1995 t0 = tcg_temp_new();
1996 /* AND rS with a mask that is 0 when rB >= 0x40 */
1997 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
1998 tcg_gen_sari_tl(t0, t0, 0x3f);
1999 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2000 t1 = tcg_temp_new();
2001 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2002 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2003 tcg_temp_free(t1);
2004 tcg_temp_free(t0);
2005 if (unlikely(Rc(ctx->opcode) != 0))
2006 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2008 #endif
2010 /*** Floating-Point arithmetic ***/
2011 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
2012 static void gen_f##name(DisasContext *ctx) \
2014 if (unlikely(!ctx->fpu_enabled)) { \
2015 gen_exception(ctx, POWERPC_EXCP_FPU); \
2016 return; \
2018 /* NIP cannot be restored if the memory exception comes from a helper */ \
2019 gen_update_nip(ctx, ctx->nip - 4); \
2020 gen_reset_fpstatus(); \
2021 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2022 cpu_fpr[rA(ctx->opcode)], \
2023 cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
2024 if (isfloat) { \
2025 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2026 cpu_fpr[rD(ctx->opcode)]); \
2028 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
2029 Rc(ctx->opcode) != 0); \
2032 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
2033 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
2034 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
2036 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2037 static void gen_f##name(DisasContext *ctx) \
2039 if (unlikely(!ctx->fpu_enabled)) { \
2040 gen_exception(ctx, POWERPC_EXCP_FPU); \
2041 return; \
2043 /* NIP cannot be restored if the memory exception comes from a helper */ \
2044 gen_update_nip(ctx, ctx->nip - 4); \
2045 gen_reset_fpstatus(); \
2046 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2047 cpu_fpr[rA(ctx->opcode)], \
2048 cpu_fpr[rB(ctx->opcode)]); \
2049 if (isfloat) { \
2050 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2051 cpu_fpr[rD(ctx->opcode)]); \
2053 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2054 set_fprf, Rc(ctx->opcode) != 0); \
2056 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
2057 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2058 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2060 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
2061 static void gen_f##name(DisasContext *ctx) \
2063 if (unlikely(!ctx->fpu_enabled)) { \
2064 gen_exception(ctx, POWERPC_EXCP_FPU); \
2065 return; \
2067 /* NIP cannot be restored if the memory exception comes from a helper */ \
2068 gen_update_nip(ctx, ctx->nip - 4); \
2069 gen_reset_fpstatus(); \
2070 gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2071 cpu_fpr[rA(ctx->opcode)], \
2072 cpu_fpr[rC(ctx->opcode)]); \
2073 if (isfloat) { \
2074 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2075 cpu_fpr[rD(ctx->opcode)]); \
2077 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2078 set_fprf, Rc(ctx->opcode) != 0); \
2080 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
2081 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
2082 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
2084 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
2085 static void gen_f##name(DisasContext *ctx) \
2087 if (unlikely(!ctx->fpu_enabled)) { \
2088 gen_exception(ctx, POWERPC_EXCP_FPU); \
2089 return; \
2091 /* NIP cannot be restored if the memory exception comes from a helper */ \
2092 gen_update_nip(ctx, ctx->nip - 4); \
2093 gen_reset_fpstatus(); \
2094 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2095 cpu_fpr[rB(ctx->opcode)]); \
2096 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2097 set_fprf, Rc(ctx->opcode) != 0); \
2100 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
2101 static void gen_f##name(DisasContext *ctx) \
2103 if (unlikely(!ctx->fpu_enabled)) { \
2104 gen_exception(ctx, POWERPC_EXCP_FPU); \
2105 return; \
2107 /* NIP cannot be restored if the memory exception comes from a helper */ \
2108 gen_update_nip(ctx, ctx->nip - 4); \
2109 gen_reset_fpstatus(); \
2110 gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
2111 cpu_fpr[rB(ctx->opcode)]); \
2112 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
2113 set_fprf, Rc(ctx->opcode) != 0); \
2116 /* fadd - fadds */
2117 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
2118 /* fdiv - fdivs */
2119 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
2120 /* fmul - fmuls */
2121 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
2123 /* fre */
2124 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
2126 /* fres */
2127 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
2129 /* frsqrte */
2130 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2132 /* frsqrtes */
2133 static void gen_frsqrtes(DisasContext *ctx)
2135 if (unlikely(!ctx->fpu_enabled)) {
2136 gen_exception(ctx, POWERPC_EXCP_FPU);
2137 return;
2139 /* NIP cannot be restored if the memory exception comes from a helper */
2140 gen_update_nip(ctx, ctx->nip - 4);
2141 gen_reset_fpstatus();
2142 gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_env,
2143 cpu_fpr[rB(ctx->opcode)]);
2144 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
2145 cpu_fpr[rD(ctx->opcode)]);
2146 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2149 /* fsel */
2150 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
2151 /* fsub - fsubs */
2152 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
2153 /* Optional: */
2155 /* fsqrt */
2156 static void gen_fsqrt(DisasContext *ctx)
2158 if (unlikely(!ctx->fpu_enabled)) {
2159 gen_exception(ctx, POWERPC_EXCP_FPU);
2160 return;
2162 /* NIP cannot be restored if the memory exception comes from a helper */
2163 gen_update_nip(ctx, ctx->nip - 4);
2164 gen_reset_fpstatus();
2165 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
2166 cpu_fpr[rB(ctx->opcode)]);
2167 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2170 static void gen_fsqrts(DisasContext *ctx)
2172 if (unlikely(!ctx->fpu_enabled)) {
2173 gen_exception(ctx, POWERPC_EXCP_FPU);
2174 return;
2176 /* NIP cannot be restored if the memory exception comes from a helper */
2177 gen_update_nip(ctx, ctx->nip - 4);
2178 gen_reset_fpstatus();
2179 gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
2180 cpu_fpr[rB(ctx->opcode)]);
2181 gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
2182 cpu_fpr[rD(ctx->opcode)]);
2183 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
2186 /*** Floating-Point multiply-and-add ***/
2187 /* fmadd - fmadds */
2188 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
2189 /* fmsub - fmsubs */
2190 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
2191 /* fnmadd - fnmadds */
2192 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
2193 /* fnmsub - fnmsubs */
2194 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
2196 /*** Floating-Point round & convert ***/
2197 /* fctiw */
2198 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
2199 /* fctiwz */
2200 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
2201 /* frsp */
2202 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
2203 #if defined(TARGET_PPC64)
2204 /* fcfid */
2205 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
2206 /* fctid */
2207 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
2208 /* fctidz */
2209 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
2210 #endif
2212 /* frin */
2213 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
2214 /* friz */
2215 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
2216 /* frip */
2217 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
2218 /* frim */
2219 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
2221 /*** Floating-Point compare ***/
2223 /* fcmpo */
2224 static void gen_fcmpo(DisasContext *ctx)
2226 TCGv_i32 crf;
2227 if (unlikely(!ctx->fpu_enabled)) {
2228 gen_exception(ctx, POWERPC_EXCP_FPU);
2229 return;
2231 /* NIP cannot be restored if the memory exception comes from a helper */
2232 gen_update_nip(ctx, ctx->nip - 4);
2233 gen_reset_fpstatus();
2234 crf = tcg_const_i32(crfD(ctx->opcode));
2235 gen_helper_fcmpo(cpu_env, cpu_fpr[rA(ctx->opcode)],
2236 cpu_fpr[rB(ctx->opcode)], crf);
2237 tcg_temp_free_i32(crf);
2238 gen_helper_float_check_status(cpu_env);
2241 /* fcmpu */
2242 static void gen_fcmpu(DisasContext *ctx)
2244 TCGv_i32 crf;
2245 if (unlikely(!ctx->fpu_enabled)) {
2246 gen_exception(ctx, POWERPC_EXCP_FPU);
2247 return;
2249 /* NIP cannot be restored if the memory exception comes from a helper */
2250 gen_update_nip(ctx, ctx->nip - 4);
2251 gen_reset_fpstatus();
2252 crf = tcg_const_i32(crfD(ctx->opcode));
2253 gen_helper_fcmpu(cpu_env, cpu_fpr[rA(ctx->opcode)],
2254 cpu_fpr[rB(ctx->opcode)], crf);
2255 tcg_temp_free_i32(crf);
2256 gen_helper_float_check_status(cpu_env);
2259 /*** Floating-point move ***/
2260 /* fabs */
2261 /* XXX: beware that fabs never checks for NaNs nor updates the FPSCR */
2262 static void gen_fabs(DisasContext *ctx)
2264 if (unlikely(!ctx->fpu_enabled)) {
2265 gen_exception(ctx, POWERPC_EXCP_FPU);
2266 return;
2268 tcg_gen_andi_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2269 ~(1ULL << 63));
2270 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2273 /* fmr - fmr. */
2274 /* XXX: beware that fmr never checks for NaNs nor updates the FPSCR */
2275 static void gen_fmr(DisasContext *ctx)
2277 if (unlikely(!ctx->fpu_enabled)) {
2278 gen_exception(ctx, POWERPC_EXCP_FPU);
2279 return;
2281 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
2282 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2285 /* fnabs */
2286 /* XXX: beware that fnabs never checks for NaNs nor updates the FPSCR */
2287 static void gen_fnabs(DisasContext *ctx)
2289 if (unlikely(!ctx->fpu_enabled)) {
2290 gen_exception(ctx, POWERPC_EXCP_FPU);
2291 return;
2293 tcg_gen_ori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2294 1ULL << 63);
2295 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2298 /* fneg */
2299 /* XXX: beware that fneg never checks for NaNs nor updates the FPSCR */
2300 static void gen_fneg(DisasContext *ctx)
2302 if (unlikely(!ctx->fpu_enabled)) {
2303 gen_exception(ctx, POWERPC_EXCP_FPU);
2304 return;
2306 tcg_gen_xori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
2307 1ULL << 63);
2308 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2311 /* fcpsgn: PowerPC 2.05 specification */
2312 /* XXX: beware that fcpsgn never checks for NaNs nor updates the FPSCR */
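/* The deposit below copies bits 0..62 (exponent and fraction) of fB into fA's
 * value, so fD ends up with fA's sign bit and fB's magnitude. */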
2313 static void gen_fcpsgn(DisasContext *ctx)
2315 if (unlikely(!ctx->fpu_enabled)) {
2316 gen_exception(ctx, POWERPC_EXCP_FPU);
2317 return;
2319 tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
2320 cpu_fpr[rB(ctx->opcode)], 0, 63);
2321 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
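/* fmrgew packs the high (even) words of fA and fB into fD, fmrgow the low
 * (odd) words; in both cases a deposit keeps one operand's word and inserts
 * the other operand's word next to it. */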
2324 static void gen_fmrgew(DisasContext *ctx)
2326 TCGv_i64 b0;
2327 if (unlikely(!ctx->fpu_enabled)) {
2328 gen_exception(ctx, POWERPC_EXCP_FPU);
2329 return;
2331 b0 = tcg_temp_new_i64();
2332 tcg_gen_shri_i64(b0, cpu_fpr[rB(ctx->opcode)], 32);
2333 tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
2334 b0, 0, 32);
2335 tcg_temp_free_i64(b0);
2338 static void gen_fmrgow(DisasContext *ctx)
2340 if (unlikely(!ctx->fpu_enabled)) {
2341 gen_exception(ctx, POWERPC_EXCP_FPU);
2342 return;
2344 tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)],
2345 cpu_fpr[rB(ctx->opcode)],
2346 cpu_fpr[rA(ctx->opcode)],
2347 32, 32);
2350 /*** Floating-Point status & ctrl register ***/
2352 /* mcrfs */
2353 static void gen_mcrfs(DisasContext *ctx)
2355 TCGv tmp = tcg_temp_new();
2356 int bfa;
2358 if (unlikely(!ctx->fpu_enabled)) {
2359 gen_exception(ctx, POWERPC_EXCP_FPU);
2360 return;
2362 bfa = 4 * (7 - crfS(ctx->opcode));
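    /* FPSCR fields are numbered from the most significant nibble, so field
     * crfS starts at bit 4 * (7 - crfS); shift it down, copy the low four
     * bits into crfD, then clear the source field. */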
2363 tcg_gen_shri_tl(tmp, cpu_fpscr, bfa);
2364 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
2365 tcg_temp_free(tmp);
2366 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
2367 tcg_gen_andi_tl(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
2370 /* mffs */
2371 static void gen_mffs(DisasContext *ctx)
2373 if (unlikely(!ctx->fpu_enabled)) {
2374 gen_exception(ctx, POWERPC_EXCP_FPU);
2375 return;
2377 gen_reset_fpstatus();
2378 tcg_gen_extu_tl_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
2379 gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
2382 /* mtfsb0 */
2383 static void gen_mtfsb0(DisasContext *ctx)
2385 uint8_t crb;
2387 if (unlikely(!ctx->fpu_enabled)) {
2388 gen_exception(ctx, POWERPC_EXCP_FPU);
2389 return;
2391 crb = 31 - crbD(ctx->opcode);
2392 gen_reset_fpstatus();
2393 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
2394 TCGv_i32 t0;
2395 /* NIP cannot be restored if the memory exception comes from a helper */
2396 gen_update_nip(ctx, ctx->nip - 4);
2397 t0 = tcg_const_i32(crb);
2398 gen_helper_fpscr_clrbit(cpu_env, t0);
2399 tcg_temp_free_i32(t0);
2401 if (unlikely(Rc(ctx->opcode) != 0)) {
2402 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2403 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2407 /* mtfsb1 */
2408 static void gen_mtfsb1(DisasContext *ctx)
2410 uint8_t crb;
2412 if (unlikely(!ctx->fpu_enabled)) {
2413 gen_exception(ctx, POWERPC_EXCP_FPU);
2414 return;
2416 crb = 31 - crbD(ctx->opcode);
2417 gen_reset_fpstatus();
2418 /* XXX: we pretend we can only do IEEE floating-point computations */
2419 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
2420 TCGv_i32 t0;
2421 /* NIP cannot be restored if the memory exception comes from a helper */
2422 gen_update_nip(ctx, ctx->nip - 4);
2423 t0 = tcg_const_i32(crb);
2424 gen_helper_fpscr_setbit(cpu_env, t0);
2425 tcg_temp_free_i32(t0);
2427 if (unlikely(Rc(ctx->opcode) != 0)) {
2428 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2429 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2431 /* We may raise a deferred exception */
2432 gen_helper_float_check_status(cpu_env);
2435 /* mtfsf */
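/* mtfsf updates the FPSCR fields selected by the 8-bit FLM mask with bits
 * taken from fB.  The L bit (ISA 2.05) selects all fields regardless of FLM,
 * and the W bit moves the mask onto the upper group of eight fields. */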
2436 static void gen_mtfsf(DisasContext *ctx)
2438 TCGv_i32 t0;
2439 int flm, l, w;
2441 if (unlikely(!ctx->fpu_enabled)) {
2442 gen_exception(ctx, POWERPC_EXCP_FPU);
2443 return;
2445 flm = FPFLM(ctx->opcode);
2446 l = FPL(ctx->opcode);
2447 w = FPW(ctx->opcode);
2448 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
2449 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2450 return;
2452 /* NIP cannot be restored if the memory exception comes from a helper */
2453 gen_update_nip(ctx, ctx->nip - 4);
2454 gen_reset_fpstatus();
2455 if (l) {
2456 t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
2457 } else {
2458 t0 = tcg_const_i32(flm << (w * 8));
2460 gen_helper_store_fpscr(cpu_env, cpu_fpr[rB(ctx->opcode)], t0);
2461 tcg_temp_free_i32(t0);
2462 if (unlikely(Rc(ctx->opcode) != 0)) {
2463 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2464 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2466 /* We may raise a deferred exception */
2467 gen_helper_float_check_status(cpu_env);
2470 /* mtfsfi */
2471 static void gen_mtfsfi(DisasContext *ctx)
2473 int bf, sh, w;
2474 TCGv_i64 t0;
2475 TCGv_i32 t1;
2477 if (unlikely(!ctx->fpu_enabled)) {
2478 gen_exception(ctx, POWERPC_EXCP_FPU);
2479 return;
2481 w = FPW(ctx->opcode);
2482 bf = FPBF(ctx->opcode);
2483 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
2484 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2485 return;
2487 sh = (8 * w) + 7 - bf;
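    /* sh turns the field number bf (counted from the most significant end,
     * adjusted by W) into a nibble index from the least significant end: the
     * immediate is shifted into place and a one-bit field mask (1 << sh) is
     * passed to the store helper. */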
2488 /* NIP cannot be restored if the memory exception comes from a helper */
2489 gen_update_nip(ctx, ctx->nip - 4);
2490 gen_reset_fpstatus();
2491 t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
2492 t1 = tcg_const_i32(1 << sh);
2493 gen_helper_store_fpscr(cpu_env, t0, t1);
2494 tcg_temp_free_i64(t0);
2495 tcg_temp_free_i32(t1);
2496 if (unlikely(Rc(ctx->opcode) != 0)) {
2497 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
2498 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
2500 /* We may raise a deferred exception */
2501 gen_helper_float_check_status(cpu_env);
2504 /*** Addressing modes ***/
2505 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
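/* For example, "lwz r3, 8(r4)" computes EA = GPR[4] + 8; rA == 0 means a
 * literal zero base rather than GPR[0].  maskl clears low displacement bits
 * for DS/DQ-form opcodes, and 32-bit mode truncates the EA to 32 bits. */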
2506 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2507 target_long maskl)
2509 target_long simm = SIMM(ctx->opcode);
2511 simm &= ~maskl;
2512 if (rA(ctx->opcode) == 0) {
2513 if (NARROW_MODE(ctx)) {
2514 simm = (uint32_t)simm;
2516 tcg_gen_movi_tl(EA, simm);
2517 } else if (likely(simm != 0)) {
2518 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2519 if (NARROW_MODE(ctx)) {
2520 tcg_gen_ext32u_tl(EA, EA);
2522 } else {
2523 if (NARROW_MODE(ctx)) {
2524 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2525 } else {
2526 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2531 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2533 if (rA(ctx->opcode) == 0) {
2534 if (NARROW_MODE(ctx)) {
2535 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2536 } else {
2537 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2539 } else {
2540 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2541 if (NARROW_MODE(ctx)) {
2542 tcg_gen_ext32u_tl(EA, EA);
2547 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2549 if (rA(ctx->opcode) == 0) {
2550 tcg_gen_movi_tl(EA, 0);
2551 } else if (NARROW_MODE(ctx)) {
2552 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2553 } else {
2554 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2558 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2559 target_long val)
2561 tcg_gen_addi_tl(ret, arg1, val);
2562 if (NARROW_MODE(ctx)) {
2563 tcg_gen_ext32u_tl(ret, ret);
2567 static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
2569 int l1 = gen_new_label();
2570 TCGv t0 = tcg_temp_new();
2571 TCGv_i32 t1, t2;
2572 /* NIP cannot be restored if the memory exception comes from a helper */
2573 gen_update_nip(ctx, ctx->nip - 4);
2574 tcg_gen_andi_tl(t0, EA, mask);
2575 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
2576 t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
2577 t2 = tcg_const_i32(0);
2578 gen_helper_raise_exception_err(cpu_env, t1, t2);
2579 tcg_temp_free_i32(t1);
2580 tcg_temp_free_i32(t2);
2581 gen_set_label(l1);
2582 tcg_temp_free(t0);
2585 /*** Integer load ***/
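/* The gen_qemu_* accessors assume the guest's big-endian data layout; when
 * the CPU runs little-endian (ctx->le_mode) loads are byte-swapped after the
 * access and stores before it. */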
2586 static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2588 tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
2591 static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2593 tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
2596 static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2598 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2599 if (unlikely(ctx->le_mode)) {
2600 tcg_gen_bswap16_tl(arg1, arg1);
2604 static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2606 if (unlikely(ctx->le_mode)) {
2607 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2608 tcg_gen_bswap16_tl(arg1, arg1);
2609 tcg_gen_ext16s_tl(arg1, arg1);
2610 } else {
2611 tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
2615 static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
2617 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2618 if (unlikely(ctx->le_mode)) {
2619 tcg_gen_bswap32_tl(arg1, arg1);
2623 static void gen_qemu_ld32u_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
2625 TCGv tmp = tcg_temp_new();
2626 gen_qemu_ld32u(ctx, tmp, addr);
2627 tcg_gen_extu_tl_i64(val, tmp);
2628 tcg_temp_free(tmp);
2631 static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
2633 if (unlikely(ctx->le_mode)) {
2634 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2635 tcg_gen_bswap32_tl(arg1, arg1);
2636 tcg_gen_ext32s_tl(arg1, arg1);
2637 } else
2638 tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
2641 static void gen_qemu_ld32s_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
2643 TCGv tmp = tcg_temp_new();
2644 gen_qemu_ld32s(ctx, tmp, addr);
2645 tcg_gen_ext_tl_i64(val, tmp);
2646 tcg_temp_free(tmp);
2649 static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2651 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
2652 if (unlikely(ctx->le_mode)) {
2653 tcg_gen_bswap64_i64(arg1, arg1);
2657 static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
2659 tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
2662 static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
2664 if (unlikely(ctx->le_mode)) {
2665 TCGv t0 = tcg_temp_new();
2666 tcg_gen_ext16u_tl(t0, arg1);
2667 tcg_gen_bswap16_tl(t0, t0);
2668 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
2669 tcg_temp_free(t0);
2670 } else {
2671 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
2675 static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
2677 if (unlikely(ctx->le_mode)) {
2678 TCGv t0 = tcg_temp_new();
2679 tcg_gen_ext32u_tl(t0, arg1);
2680 tcg_gen_bswap32_tl(t0, t0);
2681 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
2682 tcg_temp_free(t0);
2683 } else {
2684 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
2688 static void gen_qemu_st32_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
2690 TCGv tmp = tcg_temp_new();
2691 tcg_gen_trunc_i64_tl(tmp, val);
2692 gen_qemu_st32(ctx, tmp, addr);
2693 tcg_temp_free(tmp);
2696 static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
2698 if (unlikely(ctx->le_mode)) {
2699 TCGv_i64 t0 = tcg_temp_new_i64();
2700 tcg_gen_bswap64_i64(t0, arg1);
2701 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
2702 tcg_temp_free_i64(t0);
2703 } else
2704 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
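/* The macros below stamp out the usual addressing variants of each integer
 * load/store: D-form, D-form with update, indexed with update, and indexed.
 * The update forms write the effective address back into rA, which is why
 * rA == 0 (and, for loads, rA == rD) is rejected as an invalid form. */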
2707 #define GEN_LD(name, ldop, opc, type) \
2708 static void glue(gen_, name)(DisasContext *ctx) \
2710 TCGv EA; \
2711 gen_set_access_type(ctx, ACCESS_INT); \
2712 EA = tcg_temp_new(); \
2713 gen_addr_imm_index(ctx, EA, 0); \
2714 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2715 tcg_temp_free(EA); \
2718 #define GEN_LDU(name, ldop, opc, type) \
2719 static void glue(gen_, name##u)(DisasContext *ctx) \
2721 TCGv EA; \
2722 if (unlikely(rA(ctx->opcode) == 0 || \
2723 rA(ctx->opcode) == rD(ctx->opcode))) { \
2724 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2725 return; \
2727 gen_set_access_type(ctx, ACCESS_INT); \
2728 EA = tcg_temp_new(); \
2729 if (type == PPC_64B) \
2730 gen_addr_imm_index(ctx, EA, 0x03); \
2731 else \
2732 gen_addr_imm_index(ctx, EA, 0); \
2733 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2734 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2735 tcg_temp_free(EA); \
2738 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
2739 static void glue(gen_, name##ux)(DisasContext *ctx) \
2741 TCGv EA; \
2742 if (unlikely(rA(ctx->opcode) == 0 || \
2743 rA(ctx->opcode) == rD(ctx->opcode))) { \
2744 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2745 return; \
2747 gen_set_access_type(ctx, ACCESS_INT); \
2748 EA = tcg_temp_new(); \
2749 gen_addr_reg_index(ctx, EA); \
2750 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2751 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2752 tcg_temp_free(EA); \
2755 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2) \
2756 static void glue(gen_, name##x)(DisasContext *ctx) \
2758 TCGv EA; \
2759 gen_set_access_type(ctx, ACCESS_INT); \
2760 EA = tcg_temp_new(); \
2761 gen_addr_reg_index(ctx, EA); \
2762 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
2763 tcg_temp_free(EA); \
2765 #define GEN_LDX(name, ldop, opc2, opc3, type) \
2766 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE)
2768 #define GEN_LDS(name, ldop, op, type) \
2769 GEN_LD(name, ldop, op | 0x20, type); \
2770 GEN_LDU(name, ldop, op | 0x21, type); \
2771 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
2772 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
2774 /* lbz lbzu lbzux lbzx */
2775 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
2776 /* lha lhau lhaux lhax */
2777 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
2778 /* lhz lhzu lhzux lhzx */
2779 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
2780 /* lwz lwzu lwzux lwzx */
2781 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
2782 #if defined(TARGET_PPC64)
2783 /* lwaux */
2784 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
2785 /* lwax */
2786 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
2787 /* ldux */
2788 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
2789 /* ldx */
2790 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);
2792 static void gen_ld(DisasContext *ctx)
2794 TCGv EA;
2795 if (Rc(ctx->opcode)) {
2796 if (unlikely(rA(ctx->opcode) == 0 ||
2797 rA(ctx->opcode) == rD(ctx->opcode))) {
2798 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2799 return;
2802 gen_set_access_type(ctx, ACCESS_INT);
2803 EA = tcg_temp_new();
2804 gen_addr_imm_index(ctx, EA, 0x03);
2805 if (ctx->opcode & 0x02) {
2806 /* lwa (lwau is undefined) */
2807 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2808 } else {
2809 /* ld - ldu */
2810 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
2812 if (Rc(ctx->opcode))
2813 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2814 tcg_temp_free(EA);
2817 /* lq */
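/* lq loads a quadword into an even/odd GPR pair, so rD must be even and must
 * differ from rA; this implementation additionally treats it as privileged
 * and does not handle little-endian mode. */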
2818 static void gen_lq(DisasContext *ctx)
2820 #if defined(CONFIG_USER_ONLY)
2821 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2822 #else
2823 int ra, rd;
2824 TCGv EA;
2826 /* Restore CPU state */
2827 if (unlikely(ctx->mem_idx == 0)) {
2828 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2829 return;
2831 ra = rA(ctx->opcode);
2832 rd = rD(ctx->opcode);
2833 if (unlikely((rd & 1) || rd == ra)) {
2834 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2835 return;
2837 if (unlikely(ctx->le_mode)) {
2838 /* Little-endian mode is not handled */
2839 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2840 return;
2842 gen_set_access_type(ctx, ACCESS_INT);
2843 EA = tcg_temp_new();
2844 gen_addr_imm_index(ctx, EA, 0x0F);
2845 gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
2846 gen_addr_add(ctx, EA, EA, 8);
2847 gen_qemu_ld64(ctx, cpu_gpr[rd+1], EA);
2848 tcg_temp_free(EA);
2849 #endif
2851 #endif
2853 /*** Integer store ***/
2854 #define GEN_ST(name, stop, opc, type) \
2855 static void glue(gen_, name)(DisasContext *ctx) \
2857 TCGv EA; \
2858 gen_set_access_type(ctx, ACCESS_INT); \
2859 EA = tcg_temp_new(); \
2860 gen_addr_imm_index(ctx, EA, 0); \
2861 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2862 tcg_temp_free(EA); \
2865 #define GEN_STU(name, stop, opc, type) \
2866 static void glue(gen_, stop##u)(DisasContext *ctx) \
2868 TCGv EA; \
2869 if (unlikely(rA(ctx->opcode) == 0)) { \
2870 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2871 return; \
2873 gen_set_access_type(ctx, ACCESS_INT); \
2874 EA = tcg_temp_new(); \
2875 if (type == PPC_64B) \
2876 gen_addr_imm_index(ctx, EA, 0x03); \
2877 else \
2878 gen_addr_imm_index(ctx, EA, 0); \
2879 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2880 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2881 tcg_temp_free(EA); \
2884 #define GEN_STUX(name, stop, opc2, opc3, type) \
2885 static void glue(gen_, name##ux)(DisasContext *ctx) \
2887 TCGv EA; \
2888 if (unlikely(rA(ctx->opcode) == 0)) { \
2889 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2890 return; \
2892 gen_set_access_type(ctx, ACCESS_INT); \
2893 EA = tcg_temp_new(); \
2894 gen_addr_reg_index(ctx, EA); \
2895 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2896 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2897 tcg_temp_free(EA); \
2900 #define GEN_STX_E(name, stop, opc2, opc3, type, type2) \
2901 static void glue(gen_, name##x)(DisasContext *ctx) \
2903 TCGv EA; \
2904 gen_set_access_type(ctx, ACCESS_INT); \
2905 EA = tcg_temp_new(); \
2906 gen_addr_reg_index(ctx, EA); \
2907 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2908 tcg_temp_free(EA); \
2910 #define GEN_STX(name, stop, opc2, opc3, type) \
2911 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE)
2913 #define GEN_STS(name, stop, op, type) \
2914 GEN_ST(name, stop, op | 0x20, type); \
2915 GEN_STU(name, stop, op | 0x21, type); \
2916 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2917 GEN_STX(name, stop, 0x17, op | 0x00, type)
2919 /* stb stbu stbux stbx */
2920 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
2921 /* sth sthu sthux sthx */
2922 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
2923 /* stw stwu stwux stwx */
2924 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
2925 #if defined(TARGET_PPC64)
2926 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
2927 GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
2929 static void gen_std(DisasContext *ctx)
2931 int rs;
2932 TCGv EA;
2934 rs = rS(ctx->opcode);
2935 if ((ctx->opcode & 0x3) == 0x2) {
2936 #if defined(CONFIG_USER_ONLY)
2937 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2938 #else
2939 /* stq */
2940 if (unlikely(ctx->mem_idx == 0)) {
2941 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
2942 return;
2944 if (unlikely(rs & 1)) {
2945 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2946 return;
2948 if (unlikely(ctx->le_mode)) {
2949 /* Little-endian mode is not handled */
2950 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2951 return;
2953 gen_set_access_type(ctx, ACCESS_INT);
2954 EA = tcg_temp_new();
2955 gen_addr_imm_index(ctx, EA, 0x03);
2956 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2957 gen_addr_add(ctx, EA, EA, 8);
2958 gen_qemu_st64(ctx, cpu_gpr[rs+1], EA);
2959 tcg_temp_free(EA);
2960 #endif
2961 } else {
2962 /* std / stdu */
2963 if (Rc(ctx->opcode)) {
2964 if (unlikely(rA(ctx->opcode) == 0)) {
2965 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
2966 return;
2969 gen_set_access_type(ctx, ACCESS_INT);
2970 EA = tcg_temp_new();
2971 gen_addr_imm_index(ctx, EA, 0x03);
2972 gen_qemu_st64(ctx, cpu_gpr[rs], EA);
2973 if (Rc(ctx->opcode))
2974 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2975 tcg_temp_free(EA);
2978 #endif
2979 /*** Integer load and store with byte reverse ***/
2980 /* lhbrx */
2981 static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2983 tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
2984 if (likely(!ctx->le_mode)) {
2985 tcg_gen_bswap16_tl(arg1, arg1);
2988 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2990 /* lwbrx */
2991 static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
2993 tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
2994 if (likely(!ctx->le_mode)) {
2995 tcg_gen_bswap32_tl(arg1, arg1);
2998 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3000 #if defined(TARGET_PPC64)
3001 /* ldbrx */
3002 static inline void gen_qemu_ld64ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
3004 tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
3005 if (likely(!ctx->le_mode)) {
3006 tcg_gen_bswap64_tl(arg1, arg1);
3009 GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX);
3010 #endif /* TARGET_PPC64 */
3012 /* sthbrx */
3013 static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
3015 if (likely(!ctx->le_mode)) {
3016 TCGv t0 = tcg_temp_new();
3017 tcg_gen_ext16u_tl(t0, arg1);
3018 tcg_gen_bswap16_tl(t0, t0);
3019 tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
3020 tcg_temp_free(t0);
3021 } else {
3022 tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
3025 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3027 /* stwbrx */
3028 static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
3030 if (likely(!ctx->le_mode)) {
3031 TCGv t0 = tcg_temp_new();
3032 tcg_gen_ext32u_tl(t0, arg1);
3033 tcg_gen_bswap32_tl(t0, t0);
3034 tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
3035 tcg_temp_free(t0);
3036 } else {
3037 tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
3040 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3042 #if defined(TARGET_PPC64)
3043 /* stdbrx */
3044 static inline void gen_qemu_st64r(DisasContext *ctx, TCGv arg1, TCGv arg2)
3046 if (likely(!ctx->le_mode)) {
3047 TCGv t0 = tcg_temp_new();
3048 tcg_gen_bswap64_tl(t0, arg1);
3049 tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
3050 tcg_temp_free(t0);
3051 } else {
3052 tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
3055 GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX);
3056 #endif /* TARGET_PPC64 */
3058 /*** Integer load and store multiple ***/
3060 /* lmw */
3061 static void gen_lmw(DisasContext *ctx)
3063 TCGv t0;
3064 TCGv_i32 t1;
3065 gen_set_access_type(ctx, ACCESS_INT);
3066 /* NIP cannot be restored if the memory exception comes from a helper */
3067 gen_update_nip(ctx, ctx->nip - 4);
3068 t0 = tcg_temp_new();
3069 t1 = tcg_const_i32(rD(ctx->opcode));
3070 gen_addr_imm_index(ctx, t0, 0);
3071 gen_helper_lmw(cpu_env, t0, t1);
3072 tcg_temp_free(t0);
3073 tcg_temp_free_i32(t1);
3076 /* stmw */
3077 static void gen_stmw(DisasContext *ctx)
3079 TCGv t0;
3080 TCGv_i32 t1;
3081 gen_set_access_type(ctx, ACCESS_INT);
3082 /* NIP cannot be restored if the memory exception comes from a helper */
3083 gen_update_nip(ctx, ctx->nip - 4);
3084 t0 = tcg_temp_new();
3085 t1 = tcg_const_i32(rS(ctx->opcode));
3086 gen_addr_imm_index(ctx, t0, 0);
3087 gen_helper_stmw(cpu_env, t0, t1);
3088 tcg_temp_free(t0);
3089 tcg_temp_free_i32(t1);
3092 /*** Integer load and store strings ***/
3094 /* lswi */
3095 /* PowerPC32 specification says we must generate an exception if
3096 * rA is in the range of registers to be loaded.
3097 * On the other hand, IBM says this is valid, but rA won't be loaded.
3098 * For now, I'll follow the spec...
3100 static void gen_lswi(DisasContext *ctx)
3102 TCGv t0;
3103 TCGv_i32 t1, t2;
3104 int nb = NB(ctx->opcode);
3105 int start = rD(ctx->opcode);
3106 int ra = rA(ctx->opcode);
3107 int nr;
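    /* The register range loaded by lswi wraps from r31 back to r0, so the
     * check below must catch rA inside the range both in the plain case and
     * in the wrapped-around tail. */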
3109 if (nb == 0)
3110 nb = 32;
3111 nr = nb / 4;
3112 if (unlikely(((start + nr) > 32 &&
3113 start <= ra && (start + nr - 32) > ra) ||
3114 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
3115 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3116 return;
3118 gen_set_access_type(ctx, ACCESS_INT);
3119 /* NIP cannot be restored if the memory exception comes from a helper */
3120 gen_update_nip(ctx, ctx->nip - 4);
3121 t0 = tcg_temp_new();
3122 gen_addr_register(ctx, t0);
3123 t1 = tcg_const_i32(nb);
3124 t2 = tcg_const_i32(start);
3125 gen_helper_lsw(cpu_env, t0, t1, t2);
3126 tcg_temp_free(t0);
3127 tcg_temp_free_i32(t1);
3128 tcg_temp_free_i32(t2);
3131 /* lswx */
3132 static void gen_lswx(DisasContext *ctx)
3134 TCGv t0;
3135 TCGv_i32 t1, t2, t3;
3136 gen_set_access_type(ctx, ACCESS_INT);
3137 /* NIP cannot be restored if the memory exception comes from a helper */
3138 gen_update_nip(ctx, ctx->nip - 4);
3139 t0 = tcg_temp_new();
3140 gen_addr_reg_index(ctx, t0);
3141 t1 = tcg_const_i32(rD(ctx->opcode));
3142 t2 = tcg_const_i32(rA(ctx->opcode));
3143 t3 = tcg_const_i32(rB(ctx->opcode));
3144 gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3145 tcg_temp_free(t0);
3146 tcg_temp_free_i32(t1);
3147 tcg_temp_free_i32(t2);
3148 tcg_temp_free_i32(t3);
3151 /* stswi */
3152 static void gen_stswi(DisasContext *ctx)
3154 TCGv t0;
3155 TCGv_i32 t1, t2;
3156 int nb = NB(ctx->opcode);
3157 gen_set_access_type(ctx, ACCESS_INT);
3158 /* NIP cannot be restored if the memory exception comes from a helper */
3159 gen_update_nip(ctx, ctx->nip - 4);
3160 t0 = tcg_temp_new();
3161 gen_addr_register(ctx, t0);
3162 if (nb == 0)
3163 nb = 32;
3164 t1 = tcg_const_i32(nb);
3165 t2 = tcg_const_i32(rS(ctx->opcode));
3166 gen_helper_stsw(cpu_env, t0, t1, t2);
3167 tcg_temp_free(t0);
3168 tcg_temp_free_i32(t1);
3169 tcg_temp_free_i32(t2);
3172 /* stswx */
3173 static void gen_stswx(DisasContext *ctx)
3175 TCGv t0;
3176 TCGv_i32 t1, t2;
3177 gen_set_access_type(ctx, ACCESS_INT);
3178 /* NIP cannot be restored if the memory exception comes from a helper */
3179 gen_update_nip(ctx, ctx->nip - 4);
3180 t0 = tcg_temp_new();
3181 gen_addr_reg_index(ctx, t0);
3182 t1 = tcg_temp_new_i32();
3183 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3184 tcg_gen_andi_i32(t1, t1, 0x7F);
3185 t2 = tcg_const_i32(rS(ctx->opcode));
3186 gen_helper_stsw(cpu_env, t0, t1, t2);
3187 tcg_temp_free(t0);
3188 tcg_temp_free_i32(t1);
3189 tcg_temp_free_i32(t2);
3192 /*** Memory synchronisation ***/
3193 /* eieio */
3194 static void gen_eieio(DisasContext *ctx)
3198 /* isync */
3199 static void gen_isync(DisasContext *ctx)
3201 gen_stop_exception(ctx);
3204 /* lwarx */
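/* lwarx records the reservation address in cpu_reserve (and the loaded value
 * in reserve_val).  The matching store-conditional below succeeds only when
 * its effective address equals that reservation; in user mode the store is
 * completed outside translated code via the POWERPC_EXCP_STCX exception. */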
3205 static void gen_lwarx(DisasContext *ctx)
3207 TCGv t0;
3208 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3209 gen_set_access_type(ctx, ACCESS_RES);
3210 t0 = tcg_temp_local_new();
3211 gen_addr_reg_index(ctx, t0);
3212 gen_check_align(ctx, t0, 0x03);
3213 gen_qemu_ld32u(ctx, gpr, t0);
3214 tcg_gen_mov_tl(cpu_reserve, t0);
3215 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val));
3216 tcg_temp_free(t0);
3219 #if defined(CONFIG_USER_ONLY)
3220 static void gen_conditional_store (DisasContext *ctx, TCGv EA,
3221 int reg, int size)
3223 TCGv t0 = tcg_temp_new();
3224 uint32_t save_exception = ctx->exception;
3226 tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea));
3227 tcg_gen_movi_tl(t0, (size << 5) | reg);
3228 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info));
3229 tcg_temp_free(t0);
3230 gen_update_nip(ctx, ctx->nip-4);
3231 ctx->exception = POWERPC_EXCP_BRANCH;
3232 gen_exception(ctx, POWERPC_EXCP_STCX);
3233 ctx->exception = save_exception;
3235 #endif
3237 /* stwcx. */
3238 static void gen_stwcx_(DisasContext *ctx)
3240 TCGv t0;
3241 gen_set_access_type(ctx, ACCESS_RES);
3242 t0 = tcg_temp_local_new();
3243 gen_addr_reg_index(ctx, t0);
3244 gen_check_align(ctx, t0, 0x03);
3245 #if defined(CONFIG_USER_ONLY)
3246 gen_conditional_store(ctx, t0, rS(ctx->opcode), 4);
3247 #else
3249 int l1;
3251 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3252 l1 = gen_new_label();
3253 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3254 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3255 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3256 gen_set_label(l1);
3257 tcg_gen_movi_tl(cpu_reserve, -1);
3259 #endif
3260 tcg_temp_free(t0);
3263 #if defined(TARGET_PPC64)
3264 /* ldarx */
3265 static void gen_ldarx(DisasContext *ctx)
3267 TCGv t0;
3268 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3269 gen_set_access_type(ctx, ACCESS_RES);
3270 t0 = tcg_temp_local_new();
3271 gen_addr_reg_index(ctx, t0);
3272 gen_check_align(ctx, t0, 0x07);
3273 gen_qemu_ld64(ctx, gpr, t0);
3274 tcg_gen_mov_tl(cpu_reserve, t0);
3275 tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val));
3276 tcg_temp_free(t0);
3279 /* stdcx. */
3280 static void gen_stdcx_(DisasContext *ctx)
3282 TCGv t0;
3283 gen_set_access_type(ctx, ACCESS_RES);
3284 t0 = tcg_temp_local_new();
3285 gen_addr_reg_index(ctx, t0);
3286 gen_check_align(ctx, t0, 0x07);
3287 #if defined(CONFIG_USER_ONLY)
3288 gen_conditional_store(ctx, t0, rS(ctx->opcode), 8);
3289 #else
3291 int l1;
3292 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3293 l1 = gen_new_label();
3294 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3295 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
3296 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], t0);
3297 gen_set_label(l1);
3298 tcg_gen_movi_tl(cpu_reserve, -1);
3300 #endif
3301 tcg_temp_free(t0);
3303 #endif /* defined(TARGET_PPC64) */
3305 /* sync */
3306 static void gen_sync(DisasContext *ctx)
3310 /* wait */
3311 static void gen_wait(DisasContext *ctx)
3313 TCGv_i32 t0 = tcg_const_i32(1);
3314 tcg_gen_st_i32(t0, cpu_env,
3315 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3316 tcg_temp_free_i32(t0);
3317 /* Stop translation, as the CPU is supposed to sleep from now on */
3318 gen_exception_err(ctx, EXCP_HLT, 1);
3321 /*** Floating-point load ***/
3322 #define GEN_LDF(name, ldop, opc, type) \
3323 static void glue(gen_, name)(DisasContext *ctx) \
3325 TCGv EA; \
3326 if (unlikely(!ctx->fpu_enabled)) { \
3327 gen_exception(ctx, POWERPC_EXCP_FPU); \
3328 return; \
3330 gen_set_access_type(ctx, ACCESS_FLOAT); \
3331 EA = tcg_temp_new(); \
3332 gen_addr_imm_index(ctx, EA, 0); \
3333 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3334 tcg_temp_free(EA); \
3337 #define GEN_LDUF(name, ldop, opc, type) \
3338 static void glue(gen_, name##u)(DisasContext *ctx) \
3340 TCGv EA; \
3341 if (unlikely(!ctx->fpu_enabled)) { \
3342 gen_exception(ctx, POWERPC_EXCP_FPU); \
3343 return; \
3345 if (unlikely(rA(ctx->opcode) == 0)) { \
3346 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3347 return; \
3349 gen_set_access_type(ctx, ACCESS_FLOAT); \
3350 EA = tcg_temp_new(); \
3351 gen_addr_imm_index(ctx, EA, 0); \
3352 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3353 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3354 tcg_temp_free(EA); \
3357 #define GEN_LDUXF(name, ldop, opc, type) \
3358 static void glue(gen_, name##ux)(DisasContext *ctx) \
3360 TCGv EA; \
3361 if (unlikely(!ctx->fpu_enabled)) { \
3362 gen_exception(ctx, POWERPC_EXCP_FPU); \
3363 return; \
3365 if (unlikely(rA(ctx->opcode) == 0)) { \
3366 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3367 return; \
3369 gen_set_access_type(ctx, ACCESS_FLOAT); \
3370 EA = tcg_temp_new(); \
3371 gen_addr_reg_index(ctx, EA); \
3372 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3373 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3374 tcg_temp_free(EA); \
3377 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3378 static void glue(gen_, name##x)(DisasContext *ctx) \
3380 TCGv EA; \
3381 if (unlikely(!ctx->fpu_enabled)) { \
3382 gen_exception(ctx, POWERPC_EXCP_FPU); \
3383 return; \
3385 gen_set_access_type(ctx, ACCESS_FLOAT); \
3386 EA = tcg_temp_new(); \
3387 gen_addr_reg_index(ctx, EA); \
3388 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3389 tcg_temp_free(EA); \
3392 #define GEN_LDFS(name, ldop, op, type) \
3393 GEN_LDF(name, ldop, op | 0x20, type); \
3394 GEN_LDUF(name, ldop, op | 0x21, type); \
3395 GEN_LDUXF(name, ldop, op | 0x01, type); \
3396 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3398 static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3400 TCGv t0 = tcg_temp_new();
3401 TCGv_i32 t1 = tcg_temp_new_i32();
3402 gen_qemu_ld32u(ctx, t0, arg2);
3403 tcg_gen_trunc_tl_i32(t1, t0);
3404 tcg_temp_free(t0);
3405 gen_helper_float32_to_float64(arg1, cpu_env, t1);
3406 tcg_temp_free_i32(t1);
3409 /* lfd lfdu lfdux lfdx */
3410 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
3411 /* lfs lfsu lfsux lfsx */
3412 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
3414 /* lfdp */
3415 static void gen_lfdp(DisasContext *ctx)
3417 TCGv EA;
3418 if (unlikely(!ctx->fpu_enabled)) {
3419 gen_exception(ctx, POWERPC_EXCP_FPU);
3420 return;
3422 gen_set_access_type(ctx, ACCESS_FLOAT);
3423 EA = tcg_temp_new();
3424 gen_addr_imm_index(ctx, EA, 0);
3425 if (unlikely(ctx->le_mode)) {
3426 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3427 tcg_gen_addi_tl(EA, EA, 8);
3428 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3429 } else {
3430 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3431 tcg_gen_addi_tl(EA, EA, 8);
3432 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3434 tcg_temp_free(EA);
3437 /* lfdpx */
3438 static void gen_lfdpx(DisasContext *ctx)
3440 TCGv EA;
3441 if (unlikely(!ctx->fpu_enabled)) {
3442 gen_exception(ctx, POWERPC_EXCP_FPU);
3443 return;
3445 gen_set_access_type(ctx, ACCESS_FLOAT);
3446 EA = tcg_temp_new();
3447 gen_addr_reg_index(ctx, EA);
3448 if (unlikely(ctx->le_mode)) {
3449 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3450 tcg_gen_addi_tl(EA, EA, 8);
3451 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3452 } else {
3453 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3454 tcg_gen_addi_tl(EA, EA, 8);
3455 gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3457 tcg_temp_free(EA);
3460 /* lfiwax */
3461 static void gen_lfiwax(DisasContext *ctx)
3463 TCGv EA;
3464 TCGv t0;
3465 if (unlikely(!ctx->fpu_enabled)) {
3466 gen_exception(ctx, POWERPC_EXCP_FPU);
3467 return;
3469 gen_set_access_type(ctx, ACCESS_FLOAT);
3470 EA = tcg_temp_new();
3471 t0 = tcg_temp_new();
3472 gen_addr_reg_index(ctx, EA);
3473 gen_qemu_ld32s(ctx, t0, EA);
3474 tcg_gen_ext_tl_i64(cpu_fpr[rD(ctx->opcode)], t0);
3475 tcg_temp_free(EA);
3476 tcg_temp_free(t0);
3479 /*** Floating-point store ***/
3480 #define GEN_STF(name, stop, opc, type) \
3481 static void glue(gen_, name)(DisasContext *ctx) \
3483 TCGv EA; \
3484 if (unlikely(!ctx->fpu_enabled)) { \
3485 gen_exception(ctx, POWERPC_EXCP_FPU); \
3486 return; \
3488 gen_set_access_type(ctx, ACCESS_FLOAT); \
3489 EA = tcg_temp_new(); \
3490 gen_addr_imm_index(ctx, EA, 0); \
3491 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3492 tcg_temp_free(EA); \
3495 #define GEN_STUF(name, stop, opc, type) \
3496 static void glue(gen_, name##u)(DisasContext *ctx) \
3498 TCGv EA; \
3499 if (unlikely(!ctx->fpu_enabled)) { \
3500 gen_exception(ctx, POWERPC_EXCP_FPU); \
3501 return; \
3503 if (unlikely(rA(ctx->opcode) == 0)) { \
3504 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3505 return; \
3507 gen_set_access_type(ctx, ACCESS_FLOAT); \
3508 EA = tcg_temp_new(); \
3509 gen_addr_imm_index(ctx, EA, 0); \
3510 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3511 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3512 tcg_temp_free(EA); \
3515 #define GEN_STUXF(name, stop, opc, type) \
3516 static void glue(gen_, name##ux)(DisasContext *ctx) \
3518 TCGv EA; \
3519 if (unlikely(!ctx->fpu_enabled)) { \
3520 gen_exception(ctx, POWERPC_EXCP_FPU); \
3521 return; \
3523 if (unlikely(rA(ctx->opcode) == 0)) { \
3524 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3525 return; \
3527 gen_set_access_type(ctx, ACCESS_FLOAT); \
3528 EA = tcg_temp_new(); \
3529 gen_addr_reg_index(ctx, EA); \
3530 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3531 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3532 tcg_temp_free(EA); \
3535 #define GEN_STXF(name, stop, opc2, opc3, type) \
3536 static void glue(gen_, name##x)(DisasContext *ctx) \
3538 TCGv EA; \
3539 if (unlikely(!ctx->fpu_enabled)) { \
3540 gen_exception(ctx, POWERPC_EXCP_FPU); \
3541 return; \
3543 gen_set_access_type(ctx, ACCESS_FLOAT); \
3544 EA = tcg_temp_new(); \
3545 gen_addr_reg_index(ctx, EA); \
3546 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3547 tcg_temp_free(EA); \
3550 #define GEN_STFS(name, stop, op, type) \
3551 GEN_STF(name, stop, op | 0x20, type); \
3552 GEN_STUF(name, stop, op | 0x21, type); \
3553 GEN_STUXF(name, stop, op | 0x01, type); \
3554 GEN_STXF(name, stop, 0x17, op | 0x00, type)
3556 static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3558 TCGv_i32 t0 = tcg_temp_new_i32();
3559 TCGv t1 = tcg_temp_new();
3560 gen_helper_float64_to_float32(t0, cpu_env, arg1);
3561 tcg_gen_extu_i32_tl(t1, t0);
3562 tcg_temp_free_i32(t0);
3563 gen_qemu_st32(ctx, t1, arg2);
3564 tcg_temp_free(t1);
3567 /* stfd stfdu stfdux stfdx */
3568 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
3569 /* stfs stfsu stfsux stfsx */
3570 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
3572 /* stfdp */
3573 static void gen_stfdp(DisasContext *ctx)
3575 TCGv EA;
3576 if (unlikely(!ctx->fpu_enabled)) {
3577 gen_exception(ctx, POWERPC_EXCP_FPU);
3578 return;
3580 gen_set_access_type(ctx, ACCESS_FLOAT);
3581 EA = tcg_temp_new();
3582 gen_addr_imm_index(ctx, EA, 0);
3583 if (unlikely(ctx->le_mode)) {
3584 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3585 tcg_gen_addi_tl(EA, EA, 8);
3586 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3587 } else {
3588 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3589 tcg_gen_addi_tl(EA, EA, 8);
3590 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3592 tcg_temp_free(EA);
3595 /* stfdpx */
3596 static void gen_stfdpx(DisasContext *ctx)
3598 TCGv EA;
3599 if (unlikely(!ctx->fpu_enabled)) {
3600 gen_exception(ctx, POWERPC_EXCP_FPU);
3601 return;
3603 gen_set_access_type(ctx, ACCESS_FLOAT);
3604 EA = tcg_temp_new();
3605 gen_addr_reg_index(ctx, EA);
3606 if (unlikely(ctx->le_mode)) {
3607 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3608 tcg_gen_addi_tl(EA, EA, 8);
3609 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3610 } else {
3611 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
3612 tcg_gen_addi_tl(EA, EA, 8);
3613 gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
3615 tcg_temp_free(EA);
3618 /* Optional: */
3619 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
3621 TCGv t0 = tcg_temp_new();
3622 tcg_gen_trunc_i64_tl(t0, arg1);
3623 gen_qemu_st32(ctx, t0, arg2);
3624 tcg_temp_free(t0);
3626 /* stfiwx */
3627 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3629 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
3631 #if defined(TARGET_PPC64)
3632 if (ctx->has_cfar)
3633 tcg_gen_movi_tl(cpu_cfar, nip);
3634 #endif
3637 /*** Branch ***/
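/* Direct branches whose target lies on the same guest page as the current TB
 * are chained with goto_tb; cross-page targets and single-stepping fall back
 * to updating cpu_nip and exiting the TB. */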
3638 static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3640 TranslationBlock *tb;
3641 tb = ctx->tb;
3642 if (NARROW_MODE(ctx)) {
3643 dest = (uint32_t) dest;
3645 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3646 likely(!ctx->singlestep_enabled)) {
3647 tcg_gen_goto_tb(n);
3648 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3649 tcg_gen_exit_tb((uintptr_t)tb + n);
3650 } else {
3651 tcg_gen_movi_tl(cpu_nip, dest & ~3);
3652 if (unlikely(ctx->singlestep_enabled)) {
3653 if ((ctx->singlestep_enabled &
3654 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3655 (ctx->exception == POWERPC_EXCP_BRANCH ||
3656 ctx->exception == POWERPC_EXCP_TRACE)) {
3657 target_ulong tmp = ctx->nip;
3658 ctx->nip = dest;
3659 gen_exception(ctx, POWERPC_EXCP_TRACE);
3660 ctx->nip = tmp;
3662 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3663 gen_debug_exception(ctx);
3666 tcg_gen_exit_tb(0);
3670 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3672 if (NARROW_MODE(ctx)) {
3673 nip = (uint32_t)nip;
3675 tcg_gen_movi_tl(cpu_lr, nip);
3678 /* b ba bl bla */
3679 static void gen_b(DisasContext *ctx)
3681 target_ulong li, target;
3683 ctx->exception = POWERPC_EXCP_BRANCH;
3684 /* sign extend LI */
3685 li = LI(ctx->opcode);
3686 li = (li ^ 0x02000000) - 0x02000000;
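    /* LI is a 26-bit signed displacement whose sign bit is 0x02000000; the
     * xor/subtract pair sign-extends it, e.g. 0x03fffffc becomes -4. */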
3687 if (likely(AA(ctx->opcode) == 0)) {
3688 target = ctx->nip + li - 4;
3689 } else {
3690 target = li;
3692 if (LK(ctx->opcode)) {
3693 gen_setlr(ctx, ctx->nip);
3695 gen_update_cfar(ctx, ctx->nip);
3696 gen_goto_tb(ctx, 0, target);
3699 #define BCOND_IM 0
3700 #define BCOND_LR 1
3701 #define BCOND_CTR 2
3703 static inline void gen_bcond(DisasContext *ctx, int type)
3705 uint32_t bo = BO(ctx->opcode);
3706 int l1;
3707 TCGv target;
3709 ctx->exception = POWERPC_EXCP_BRANCH;
3710 if (type == BCOND_LR || type == BCOND_CTR) {
3711 target = tcg_temp_local_new();
3712 if (type == BCOND_CTR)
3713 tcg_gen_mov_tl(target, cpu_ctr);
3714 else
3715 tcg_gen_mov_tl(target, cpu_lr);
3716 } else {
3717 TCGV_UNUSED(target);
3719 if (LK(ctx->opcode))
3720 gen_setlr(ctx, ctx->nip);
3721 l1 = gen_new_label();
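    /* l1 is the fall-through (branch-not-taken) path: each of the CTR and CR
     * tests below jumps to l1 as soon as its branch condition fails. */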
3722 if ((bo & 0x4) == 0) {
3723 /* Decrement and test CTR */
3724 TCGv temp = tcg_temp_new();
3725 if (unlikely(type == BCOND_CTR)) {
3726 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3727 return;
3729 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3730 if (NARROW_MODE(ctx)) {
3731 tcg_gen_ext32u_tl(temp, cpu_ctr);
3732 } else {
3733 tcg_gen_mov_tl(temp, cpu_ctr);
3735 if (bo & 0x2) {
3736 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3737 } else {
3738 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3740 tcg_temp_free(temp);
3742 if ((bo & 0x10) == 0) {
3743 /* Test CR */
3744 uint32_t bi = BI(ctx->opcode);
3745 uint32_t mask = 1 << (3 - (bi & 0x03));
3746 TCGv_i32 temp = tcg_temp_new_i32();
3748 if (bo & 0x8) {
3749 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3750 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3751 } else {
3752 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3753 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3755 tcg_temp_free_i32(temp);
3757 gen_update_cfar(ctx, ctx->nip);
3758 if (type == BCOND_IM) {
3759 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3760 if (likely(AA(ctx->opcode) == 0)) {
3761 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3762 } else {
3763 gen_goto_tb(ctx, 0, li);
3765 gen_set_label(l1);
3766 gen_goto_tb(ctx, 1, ctx->nip);
3767 } else {
3768 if (NARROW_MODE(ctx)) {
3769 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3770 } else {
3771 tcg_gen_andi_tl(cpu_nip, target, ~3);
3773 tcg_gen_exit_tb(0);
3774 gen_set_label(l1);
3775 gen_update_nip(ctx, ctx->nip);
3776 tcg_gen_exit_tb(0);
3780 static void gen_bc(DisasContext *ctx)
3782 gen_bcond(ctx, BCOND_IM);
3785 static void gen_bcctr(DisasContext *ctx)
3787 gen_bcond(ctx, BCOND_CTR);
3790 static void gen_bclr(DisasContext *ctx)
3792 gen_bcond(ctx, BCOND_LR);
3795 /*** Condition register logical ***/
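/* Each CR field is kept as a 4-bit value in cpu_crf[n]; sh re-aligns the
 * source bit (crbA/crbB) with the destination bit position (crbD) before the
 * logical operation, and the result is merged back into the destination
 * field under a one-bit mask. */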
3796 #define GEN_CRLOGIC(name, tcg_op, opc) \
3797 static void glue(gen_, name)(DisasContext *ctx) \
3799 uint8_t bitmask; \
3800 int sh; \
3801 TCGv_i32 t0, t1; \
3802 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3803 t0 = tcg_temp_new_i32(); \
3804 if (sh > 0) \
3805 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3806 else if (sh < 0) \
3807 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3808 else \
3809 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3810 t1 = tcg_temp_new_i32(); \
3811 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3812 if (sh > 0) \
3813 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3814 else if (sh < 0) \
3815 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3816 else \
3817 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3818 tcg_op(t0, t0, t1); \
3819 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3820 tcg_gen_andi_i32(t0, t0, bitmask); \
3821 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3822 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3823 tcg_temp_free_i32(t0); \
3824 tcg_temp_free_i32(t1); \
3827 /* crand */
3828 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3829 /* crandc */
3830 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3831 /* creqv */
3832 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3833 /* crnand */
3834 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3835 /* crnor */
3836 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3837 /* cror */
3838 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3839 /* crorc */
3840 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3841 /* crxor */
3842 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3844 /* mcrf */
3845 static void gen_mcrf(DisasContext *ctx)
3847 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3850 /*** System linkage ***/
3852 /* rfi (mem_idx only) */
3853 static void gen_rfi(DisasContext *ctx)
3855 #if defined(CONFIG_USER_ONLY)
3856 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3857 #else
3858 /* Restore CPU state */
3859 if (unlikely(!ctx->mem_idx)) {
3860 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3861 return;
3863 gen_update_cfar(ctx, ctx->nip);
3864 gen_helper_rfi(cpu_env);
3865 gen_sync_exception(ctx);
3866 #endif
3869 #if defined(TARGET_PPC64)
3870 static void gen_rfid(DisasContext *ctx)
3872 #if defined(CONFIG_USER_ONLY)
3873 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3874 #else
3875 /* Restore CPU state */
3876 if (unlikely(!ctx->mem_idx)) {
3877 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3878 return;
3880 gen_update_cfar(ctx, ctx->nip);
3881 gen_helper_rfid(cpu_env);
3882 gen_sync_exception(ctx);
3883 #endif
3886 static void gen_hrfid(DisasContext *ctx)
3888 #if defined(CONFIG_USER_ONLY)
3889 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3890 #else
3891 /* Restore CPU state */
3892 if (unlikely(ctx->mem_idx <= 1)) {
3893 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3894 return;
3896 gen_helper_hrfid(cpu_env);
3897 gen_sync_exception(ctx);
3898 #endif
3900 #endif
3902 /* sc */
3903 #if defined(CONFIG_USER_ONLY)
3904 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3905 #else
3906 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3907 #endif
3908 static void gen_sc(DisasContext *ctx)
3910 uint32_t lev;
3912 lev = (ctx->opcode >> 5) & 0x7F;
3913 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3916 /*** Trap ***/
3918 /* tw */
3919 static void gen_tw(DisasContext *ctx)
3921 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3922 /* Update the nip since this might generate a trap exception */
3923 gen_update_nip(ctx, ctx->nip);
3924 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3925 t0);
3926 tcg_temp_free_i32(t0);
3929 /* twi */
3930 static void gen_twi(DisasContext *ctx)
3932 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3933 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3934 /* Update the nip since this might generate a trap exception */
3935 gen_update_nip(ctx, ctx->nip);
3936 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3937 tcg_temp_free(t0);
3938 tcg_temp_free_i32(t1);
3941 #if defined(TARGET_PPC64)
3942 /* td */
3943 static void gen_td(DisasContext *ctx)
3945 TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
3946 /* Update the nip since this might generate a trap exception */
3947 gen_update_nip(ctx, ctx->nip);
3948 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
3949 t0);
3950 tcg_temp_free_i32(t0);
3953 /* tdi */
3954 static void gen_tdi(DisasContext *ctx)
3956 TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
3957 TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
3958 /* Update the nip since this might generate a trap exception */
3959 gen_update_nip(ctx, ctx->nip);
3960 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
3961 tcg_temp_free(t0);
3962 tcg_temp_free_i32(t1);
3964 #endif
3966 /*** Processor control ***/
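/* XER's SO, OV and CA bits live in dedicated globals so arithmetic code can
 * update them cheaply; gen_read_xer and gen_write_xer below merge them back
 * into, or split them out of, the remaining XER bits. */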
3968 static void gen_read_xer(TCGv dst)
3970 TCGv t0 = tcg_temp_new();
3971 TCGv t1 = tcg_temp_new();
3972 TCGv t2 = tcg_temp_new();
3973 tcg_gen_mov_tl(dst, cpu_xer);
3974 tcg_gen_shli_tl(t0, cpu_so, XER_SO);
3975 tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
3976 tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
3977 tcg_gen_or_tl(t0, t0, t1);
3978 tcg_gen_or_tl(dst, dst, t2);
3979 tcg_gen_or_tl(dst, dst, t0);
3980 tcg_temp_free(t0);
3981 tcg_temp_free(t1);
3982 tcg_temp_free(t2);
3985 static void gen_write_xer(TCGv src)
3987 tcg_gen_andi_tl(cpu_xer, src,
3988 ~((1u << XER_SO) | (1u << XER_OV) | (1u << XER_CA)));
3989 tcg_gen_shri_tl(cpu_so, src, XER_SO);
3990 tcg_gen_shri_tl(cpu_ov, src, XER_OV);
3991 tcg_gen_shri_tl(cpu_ca, src, XER_CA);
3992 tcg_gen_andi_tl(cpu_so, cpu_so, 1);
3993 tcg_gen_andi_tl(cpu_ov, cpu_ov, 1);
3994 tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
3997 /* mcrxr */
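/* mcrxr copies XER[SO, OV, CA] into bits 3:1 of CR field crfD (bit 0 is
 * set to 0) and then clears those three XER bits.
 */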
3998 static void gen_mcrxr(DisasContext *ctx)
4000 TCGv_i32 t0 = tcg_temp_new_i32();
4001 TCGv_i32 t1 = tcg_temp_new_i32();
4002 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4004     tcg_gen_trunc_tl_i32(t0, cpu_so);
4005     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4006     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4007     tcg_gen_shli_i32(dst, dst, 1);           /* CA into bit 1 */
4008     tcg_gen_deposit_i32(dst, dst, t0, 3, 1); /* SO into bit 3 */
4009     tcg_gen_deposit_i32(dst, dst, t1, 2, 1); /* OV into bit 2 */
4011 tcg_temp_free_i32(t0);
4012 tcg_temp_free_i32(t1);
4014 tcg_gen_movi_tl(cpu_so, 0);
4015 tcg_gen_movi_tl(cpu_ov, 0);
4016 tcg_gen_movi_tl(cpu_ca, 0);
4019 /* mfcr mfocrf */
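/* With the mfocrf form (opcode & 0x00100000 set) and exactly one bit set
 * in CRM, only the selected CR field is copied into rD, shifted to its
 * architected position.  Otherwise all eight 4-bit CR fields are
 * concatenated.  gen_mtcrf below implements the move in the other
 * direction.
 */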
4020 static void gen_mfcr(DisasContext *ctx)
4022 uint32_t crm, crn;
4024 if (likely(ctx->opcode & 0x00100000)) {
4025 crm = CRM(ctx->opcode);
4026 if (likely(crm && ((crm & (crm - 1)) == 0))) {
4027 crn = ctz32 (crm);
4028 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4029 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4030 cpu_gpr[rD(ctx->opcode)], crn * 4);
4032 } else {
4033 TCGv_i32 t0 = tcg_temp_new_i32();
4034 tcg_gen_mov_i32(t0, cpu_crf[0]);
4035 tcg_gen_shli_i32(t0, t0, 4);
4036 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4037 tcg_gen_shli_i32(t0, t0, 4);
4038 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4039 tcg_gen_shli_i32(t0, t0, 4);
4040 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4041 tcg_gen_shli_i32(t0, t0, 4);
4042 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4043 tcg_gen_shli_i32(t0, t0, 4);
4044 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4045 tcg_gen_shli_i32(t0, t0, 4);
4046 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4047 tcg_gen_shli_i32(t0, t0, 4);
4048 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4049 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4050 tcg_temp_free_i32(t0);
4054 /* mfmsr */
4055 static void gen_mfmsr(DisasContext *ctx)
4057 #if defined(CONFIG_USER_ONLY)
4058 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4059 #else
4060 if (unlikely(!ctx->mem_idx)) {
4061 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4062 return;
4064 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4065 #endif
4068 static void spr_noaccess(void *opaque, int gprn, int sprn)
4070 #if 0
4071 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
4072 printf("ERROR: try to access SPR %d !\n", sprn);
4073 #endif
4075 #define SPR_NOACCESS (&spr_noaccess)
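/* SPR accesses are dispatched through per-SPR callback tables: hypervisor
 * context (mem_idx == 2) uses the hea callbacks, supervisor context the
 * oea ones and user mode the uea ones.  A NULL callback raises an
 * invalid-SPR exception; SPR_NOACCESS raises a privilege exception, with
 * a special case that keeps userland PVR reads from flooding the log.
 */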
4077 /* mfspr */
4078 static inline void gen_op_mfspr(DisasContext *ctx)
4080 void (*read_cb)(void *opaque, int gprn, int sprn);
4081 uint32_t sprn = SPR(ctx->opcode);
4083 #if !defined(CONFIG_USER_ONLY)
4084 if (ctx->mem_idx == 2)
4085 read_cb = ctx->spr_cb[sprn].hea_read;
4086 else if (ctx->mem_idx)
4087 read_cb = ctx->spr_cb[sprn].oea_read;
4088 else
4089 #endif
4090 read_cb = ctx->spr_cb[sprn].uea_read;
4091 if (likely(read_cb != NULL)) {
4092 if (likely(read_cb != SPR_NOACCESS)) {
4093 (*read_cb)(ctx, rD(ctx->opcode), sprn);
4094 } else {
4095 /* Privilege exception */
4096 /* This is a hack to avoid warnings when running Linux:
4097  * this OS breaks the PowerPC virtualisation model,
4098  * allowing userland applications to read the PVR
4099  */
4100 if (sprn != SPR_PVR) {
4101 qemu_log("Trying to read privileged spr %d (0x%03x) at "
4102 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4103 printf("Trying to read privileged spr %d (0x%03x) at "
4104 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4106 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4108 } else {
4109 /* Not defined */
4110 qemu_log("Trying to read invalid spr %d (0x%03x) at "
4111 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4112 printf("Trying to read invalid spr %d (0x%03x) at "
4113 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4114 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4118 static void gen_mfspr(DisasContext *ctx)
4120 gen_op_mfspr(ctx);
4123 /* mftb */
4124 static void gen_mftb(DisasContext *ctx)
4126 gen_op_mfspr(ctx);
4129 /* mtcrf mtocrf */
4130 static void gen_mtcrf(DisasContext *ctx)
4132 uint32_t crm, crn;
4134 crm = CRM(ctx->opcode);
4135 if (likely((ctx->opcode & 0x00100000))) {
4136 if (crm && ((crm & (crm - 1)) == 0)) {
4137 TCGv_i32 temp = tcg_temp_new_i32();
4138 crn = ctz32 (crm);
4139 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4140 tcg_gen_shri_i32(temp, temp, crn * 4);
4141 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4142 tcg_temp_free_i32(temp);
4144 } else {
4145 TCGv_i32 temp = tcg_temp_new_i32();
4146 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4147 for (crn = 0 ; crn < 8 ; crn++) {
4148 if (crm & (1 << crn)) {
4149 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4150 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4153 tcg_temp_free_i32(temp);
4157 /* mtmsr */
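/* For both mtmsrd and mtmsr, the L=1 form (opcode & 0x00010000) only moves
 * MSR[EE] and MSR[RI] and needs no synchronisation.  The full form goes
 * through the store_msr helper and must stop translation, since the new
 * MSR value may change the translation context or enter power saving.
 */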
4158 #if defined(TARGET_PPC64)
4159 static void gen_mtmsrd(DisasContext *ctx)
4161 #if defined(CONFIG_USER_ONLY)
4162 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4163 #else
4164 if (unlikely(!ctx->mem_idx)) {
4165 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4166 return;
4168 if (ctx->opcode & 0x00010000) {
4169 /* Special form that does not need any synchronisation */
4170 TCGv t0 = tcg_temp_new();
4171 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
4172 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
4173 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
4174 tcg_temp_free(t0);
4175 } else {
4176 /* XXX: we need to update nip before the store;
4177  * if we enter power saving mode, we will exit the loop
4178  * directly from ppc_store_msr
4179  */
4180 gen_update_nip(ctx, ctx->nip);
4181 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
4182 /* Must stop the translation as the machine state may have changed */
4183 /* Note that mtmsr is not always defined as context-synchronizing */
4184 gen_stop_exception(ctx);
4186 #endif
4188 #endif
4190 static void gen_mtmsr(DisasContext *ctx)
4192 #if defined(CONFIG_USER_ONLY)
4193 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4194 #else
4195 if (unlikely(!ctx->mem_idx)) {
4196 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4197 return;
4199 if (ctx->opcode & 0x00010000) {
4200 /* Special form that does not need any synchronisation */
4201 TCGv t0 = tcg_temp_new();
4202 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1 << MSR_RI) | (1 << MSR_EE));
4203 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~((1 << MSR_RI) | (1 << MSR_EE)));
4204 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
4205 tcg_temp_free(t0);
4206 } else {
4207 TCGv msr = tcg_temp_new();
4209 /* XXX: we need to update nip before the store;
4210  * if we enter power saving mode, we will exit the loop
4211  * directly from ppc_store_msr
4212  */
4213 gen_update_nip(ctx, ctx->nip);
4214 #if defined(TARGET_PPC64)
4215 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
4216 #else
4217 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
4218 #endif
4219 gen_helper_store_msr(cpu_env, msr);
4220 /* Must stop the translation as the machine state may have changed */
4221 /* Note that mtmsr is not always defined as context-synchronizing */
4222 gen_stop_exception(ctx);
4224 #endif
4227 /* mtspr */
4228 static void gen_mtspr(DisasContext *ctx)
4230 void (*write_cb)(void *opaque, int sprn, int gprn);
4231 uint32_t sprn = SPR(ctx->opcode);
4233 #if !defined(CONFIG_USER_ONLY)
4234 if (ctx->mem_idx == 2)
4235 write_cb = ctx->spr_cb[sprn].hea_write;
4236 else if (ctx->mem_idx)
4237 write_cb = ctx->spr_cb[sprn].oea_write;
4238 else
4239 #endif
4240 write_cb = ctx->spr_cb[sprn].uea_write;
4241 if (likely(write_cb != NULL)) {
4242 if (likely(write_cb != SPR_NOACCESS)) {
4243 (*write_cb)(ctx, sprn, rS(ctx->opcode));
4244 } else {
4245 /* Privilege exception */
4246 qemu_log("Trying to write privileged spr %d (0x%03x) at "
4247 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4248 printf("Trying to write privileged spr %d (0x%03x) at "
4249 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4250 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4252 } else {
4253 /* Not defined */
4254 qemu_log("Trying to write invalid spr %d (0x%03x) at "
4255 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4256 printf("Trying to write invalid spr %d (0x%03x) at "
4257 TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
4258 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4262 /*** Cache management ***/
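/* dcbf, dcbst and dcbi are modelled as dummy byte accesses at the computed
 * effective address so that MMU permission checks and exceptions behave as
 * the ISA requires; the cache itself is not modelled.  dcbt, dcbtst and
 * dcba are pure no-ops, while dcbz and icbi do real work in helpers.
 */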
4264 /* dcbf */
4265 static void gen_dcbf(DisasContext *ctx)
4267 /* XXX: specification says this is treated as a load by the MMU */
4268 TCGv t0;
4269 gen_set_access_type(ctx, ACCESS_CACHE);
4270 t0 = tcg_temp_new();
4271 gen_addr_reg_index(ctx, t0);
4272 gen_qemu_ld8u(ctx, t0, t0);
4273 tcg_temp_free(t0);
4276 /* dcbi (Supervisor only) */
4277 static void gen_dcbi(DisasContext *ctx)
4279 #if defined(CONFIG_USER_ONLY)
4280 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4281 #else
4282 TCGv EA, val;
4283 if (unlikely(!ctx->mem_idx)) {
4284 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4285 return;
4287 EA = tcg_temp_new();
4288 gen_set_access_type(ctx, ACCESS_CACHE);
4289 gen_addr_reg_index(ctx, EA);
4290 val = tcg_temp_new();
4291 /* XXX: specification says this should be treated as a store by the MMU */
4292 gen_qemu_ld8u(ctx, val, EA);
4293 gen_qemu_st8(ctx, val, EA);
4294 tcg_temp_free(val);
4295 tcg_temp_free(EA);
4296 #endif
4299 /* dcbst */
4300 static void gen_dcbst(DisasContext *ctx)
4302 /* XXX: specification says this is treated as a load by the MMU */
4303 TCGv t0;
4304 gen_set_access_type(ctx, ACCESS_CACHE);
4305 t0 = tcg_temp_new();
4306 gen_addr_reg_index(ctx, t0);
4307 gen_qemu_ld8u(ctx, t0, t0);
4308 tcg_temp_free(t0);
4311 /* dcbt */
4312 static void gen_dcbt(DisasContext *ctx)
4314 /* interpreted as no-op */
4315 /* XXX: specification says this is treated as a load by the MMU
4316  * but does not generate any exception
4317  */
4320 /* dcbtst */
4321 static void gen_dcbtst(DisasContext *ctx)
4323 /* interpreted as no-op */
4324 /* XXX: specification says this is treated as a load by the MMU
4325  * but does not generate any exception
4326  */
4329 /* dcbz */
4330 static void gen_dcbz(DisasContext *ctx)
4332 TCGv tcgv_addr;
4333 TCGv_i32 tcgv_is_dcbzl;
4334 int is_dcbzl = ctx->opcode & 0x00200000 ? 1 : 0;
4336 gen_set_access_type(ctx, ACCESS_CACHE);
4337 /* NIP cannot be restored if the memory exception comes from a helper */
4338 gen_update_nip(ctx, ctx->nip - 4);
4339 tcgv_addr = tcg_temp_new();
4340 tcgv_is_dcbzl = tcg_const_i32(is_dcbzl);
4342 gen_addr_reg_index(ctx, tcgv_addr);
4343 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_is_dcbzl);
4345 tcg_temp_free(tcgv_addr);
4346 tcg_temp_free_i32(tcgv_is_dcbzl);
4349 /* dst / dstt */
4350 static void gen_dst(DisasContext *ctx)
4352 if (rA(ctx->opcode) == 0) {
4353 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4354 } else {
4355 /* interpreted as no-op */
4359 /* dstst / dststt */
4360 static void gen_dstst(DisasContext *ctx)
4362 if (rA(ctx->opcode) == 0) {
4363 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4364 } else {
4365 /* interpreted as no-op */
4370 /* dss / dssall */
4371 static void gen_dss(DisasContext *ctx)
4373 /* interpreted as no-op */
4376 /* icbi */
4377 static void gen_icbi(DisasContext *ctx)
4379 TCGv t0;
4380 gen_set_access_type(ctx, ACCESS_CACHE);
4381 /* NIP cannot be restored if the memory exception comes from a helper */
4382 gen_update_nip(ctx, ctx->nip - 4);
4383 t0 = tcg_temp_new();
4384 gen_addr_reg_index(ctx, t0);
4385 gen_helper_icbi(cpu_env, t0);
4386 tcg_temp_free(t0);
4389 /* Optional: */
4390 /* dcba */
4391 static void gen_dcba(DisasContext *ctx)
4393 /* interpreted as no-op */
4394 /* XXX: specification says this is treated as a store by the MMU
4395  * but does not generate any exception
4396  */
4399 /*** Segment register manipulation ***/
4400 /* Supervisor only: */
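/* mfsr and mtsr take the segment register number from the SR field of the
 * opcode; mfsrin and mtsrin compute it as (rB >> 28) & 0xF.  All four are
 * privileged.  The *_64b variants below provide the PowerPC 64 "bridge"
 * behaviour on top of the SLB.
 */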
4402 /* mfsr */
4403 static void gen_mfsr(DisasContext *ctx)
4405 #if defined(CONFIG_USER_ONLY)
4406 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4407 #else
4408 TCGv t0;
4409 if (unlikely(!ctx->mem_idx)) {
4410 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4411 return;
4413 t0 = tcg_const_tl(SR(ctx->opcode));
4414 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4415 tcg_temp_free(t0);
4416 #endif
4419 /* mfsrin */
4420 static void gen_mfsrin(DisasContext *ctx)
4422 #if defined(CONFIG_USER_ONLY)
4423 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4424 #else
4425 TCGv t0;
4426 if (unlikely(!ctx->mem_idx)) {
4427 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4428 return;
4430 t0 = tcg_temp_new();
4431 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4432 tcg_gen_andi_tl(t0, t0, 0xF);
4433 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4434 tcg_temp_free(t0);
4435 #endif
4438 /* mtsr */
4439 static void gen_mtsr(DisasContext *ctx)
4441 #if defined(CONFIG_USER_ONLY)
4442 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4443 #else
4444 TCGv t0;
4445 if (unlikely(!ctx->mem_idx)) {
4446 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4447 return;
4449 t0 = tcg_const_tl(SR(ctx->opcode));
4450 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4451 tcg_temp_free(t0);
4452 #endif
4455 /* mtsrin */
4456 static void gen_mtsrin(DisasContext *ctx)
4458 #if defined(CONFIG_USER_ONLY)
4459 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4460 #else
4461 TCGv t0;
4462 if (unlikely(!ctx->mem_idx)) {
4463 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4464 return;
4466 t0 = tcg_temp_new();
4467 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4468 tcg_gen_andi_tl(t0, t0, 0xF);
4469 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
4470 tcg_temp_free(t0);
4471 #endif
4474 #if defined(TARGET_PPC64)
4475 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4477 /* mfsr */
4478 static void gen_mfsr_64b(DisasContext *ctx)
4480 #if defined(CONFIG_USER_ONLY)
4481 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4482 #else
4483 TCGv t0;
4484 if (unlikely(!ctx->mem_idx)) {
4485 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4486 return;
4488 t0 = tcg_const_tl(SR(ctx->opcode));
4489 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4490 tcg_temp_free(t0);
4491 #endif
4494 /* mfsrin */
4495 static void gen_mfsrin_64b(DisasContext *ctx)
4497 #if defined(CONFIG_USER_ONLY)
4498 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4499 #else
4500 TCGv t0;
4501 if (unlikely(!ctx->mem_idx)) {
4502 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4503 return;
4505 t0 = tcg_temp_new();
4506 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4507 tcg_gen_andi_tl(t0, t0, 0xF);
4508 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4509 tcg_temp_free(t0);
4510 #endif
4513 /* mtsr */
4514 static void gen_mtsr_64b(DisasContext *ctx)
4516 #if defined(CONFIG_USER_ONLY)
4517 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4518 #else
4519 TCGv t0;
4520 if (unlikely(!ctx->mem_idx)) {
4521 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4522 return;
4524 t0 = tcg_const_tl(SR(ctx->opcode));
4525 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4526 tcg_temp_free(t0);
4527 #endif
4530 /* mtsrin */
4531 static void gen_mtsrin_64b(DisasContext *ctx)
4533 #if defined(CONFIG_USER_ONLY)
4534 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4535 #else
4536 TCGv t0;
4537 if (unlikely(!ctx->mem_idx)) {
4538 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4539 return;
4541 t0 = tcg_temp_new();
4542 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
4543 tcg_gen_andi_tl(t0, t0, 0xF);
4544 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
4545 tcg_temp_free(t0);
4546 #endif
4549 /* slbmte */
4550 static void gen_slbmte(DisasContext *ctx)
4552 #if defined(CONFIG_USER_ONLY)
4553 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4554 #else
4555 if (unlikely(!ctx->mem_idx)) {
4556 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4557 return;
4559 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
4560 cpu_gpr[rS(ctx->opcode)]);
4561 #endif
4564 static void gen_slbmfee(DisasContext *ctx)
4566 #if defined(CONFIG_USER_ONLY)
4567 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4568 #else
4569 if (unlikely(!ctx->mem_idx)) {
4570 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4571 return;
4573 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4574 cpu_gpr[rB(ctx->opcode)]);
4575 #endif
4578 static void gen_slbmfev(DisasContext *ctx)
4580 #if defined(CONFIG_USER_ONLY)
4581 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4582 #else
4583 if (unlikely(!ctx->mem_idx)) {
4584 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
4585 return;
4587 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
4588 cpu_gpr[rB(ctx->opcode)]);
4589 #endif
4591 #endif /* defined(TARGET_PPC64) */
4593 /*** Lookaside buffer management ***/
4594 /* Optional & mem_idx only: */
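/* All of these are privileged.  tlbie narrows the effective address to
 * 32 bits when running in narrow mode, and tlbsync simply ends the
 * translation block since there is nothing to synchronise in the
 * emulated TLB.
 */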
4596 /* tlbia */
4597 static void gen_tlbia(DisasContext *ctx)
4599 #if defined(CONFIG_USER_ONLY)
4600 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4601 #else
4602 if (unlikely(!ctx->mem_idx)) {
4603 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4604 return;
4606 gen_helper_tlbia(cpu_env);
4607 #endif
4610 /* tlbiel */
4611 static void gen_tlbiel(DisasContext *ctx)
4613 #if defined(CONFIG_USER_ONLY)
4614 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4615 #else
4616 if (unlikely(!ctx->mem_idx)) {
4617 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4618 return;
4620 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4621 #endif
4624 /* tlbie */
4625 static void gen_tlbie(DisasContext *ctx)
4627 #if defined(CONFIG_USER_ONLY)
4628 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4629 #else
4630 if (unlikely(!ctx->mem_idx)) {
4631 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4632 return;
4634 if (NARROW_MODE(ctx)) {
4635 TCGv t0 = tcg_temp_new();
4636 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
4637 gen_helper_tlbie(cpu_env, t0);
4638 tcg_temp_free(t0);
4639 } else {
4640 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4642 #endif
4645 /* tlbsync */
4646 static void gen_tlbsync(DisasContext *ctx)
4648 #if defined(CONFIG_USER_ONLY)
4649 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4650 #else
4651 if (unlikely(!ctx->mem_idx)) {
4652 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4653 return;
4655 /* This has no effect: it should ensure that all previously
4656  * issued tlbie operations have completed
4657  */
4658 gen_stop_exception(ctx);
4659 #endif
4662 #if defined(TARGET_PPC64)
4663 /* slbia */
4664 static void gen_slbia(DisasContext *ctx)
4666 #if defined(CONFIG_USER_ONLY)
4667 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4668 #else
4669 if (unlikely(!ctx->mem_idx)) {
4670 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4671 return;
4673 gen_helper_slbia(cpu_env);
4674 #endif
4677 /* slbie */
4678 static void gen_slbie(DisasContext *ctx)
4680 #if defined(CONFIG_USER_ONLY)
4681 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4682 #else
4683 if (unlikely(!ctx->mem_idx)) {
4684 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
4685 return;
4687 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
4688 #endif
4690 #endif
4692 /*** External control ***/
4693 /* Optional: */
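/* eciwx and ecowx are modelled as plain word accesses with an alignment
 * check; the EAR[E] enable bit is not checked (see the XXX notes below).
 */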
4695 /* eciwx */
4696 static void gen_eciwx(DisasContext *ctx)
4698 TCGv t0;
4699 /* Should check EAR[E] ! */
4700 gen_set_access_type(ctx, ACCESS_EXT);
4701 t0 = tcg_temp_new();
4702 gen_addr_reg_index(ctx, t0);
4703 gen_check_align(ctx, t0, 0x03);
4704 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4705 tcg_temp_free(t0);
4708 /* ecowx */
4709 static void gen_ecowx(DisasContext *ctx)
4711 TCGv t0;
4712 /* Should check EAR[E] ! */
4713 gen_set_access_type(ctx, ACCESS_EXT);
4714 t0 = tcg_temp_new();
4715 gen_addr_reg_index(ctx, t0);
4716 gen_check_align(ctx, t0, 0x03);
4717 gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
4718 tcg_temp_free(t0);
4721 /* PowerPC 601 specific instructions */
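/* These POWER-heritage instructions make heavy use of the MQ SPR and
 * compute XER[OV]/XER[SO] with explicit compare-and-branch sequences,
 * since TCG exposes no architectural overflow flag.
 */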
4723 /* abs - abs. */
4724 static void gen_abs(DisasContext *ctx)
4726 int l1 = gen_new_label();
4727 int l2 = gen_new_label();
4728 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
4729 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4730 tcg_gen_br(l2);
4731 gen_set_label(l1);
4732 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4733 gen_set_label(l2);
4734 if (unlikely(Rc(ctx->opcode) != 0))
4735 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4738 /* abso - abso. */
4739 static void gen_abso(DisasContext *ctx)
4741 int l1 = gen_new_label();
4742 int l2 = gen_new_label();
4743 int l3 = gen_new_label();
4744 /* Start with XER OV disabled, the most likely case */
4745 tcg_gen_movi_tl(cpu_ov, 0);
4746 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
4747 tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
4748 tcg_gen_movi_tl(cpu_ov, 1);
4749 tcg_gen_movi_tl(cpu_so, 1);
4750 tcg_gen_br(l2);
4751 gen_set_label(l1);
4752 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4753 tcg_gen_br(l3);
4754 gen_set_label(l2);
4755 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4756 gen_set_label(l3);
4757 if (unlikely(Rc(ctx->opcode) != 0))
4758 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4761 /* clcs */
4762 static void gen_clcs(DisasContext *ctx)
4764 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
4765 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
4766 tcg_temp_free_i32(t0);
4767 /* Rc=1 sets CR0 to an undefined state */
4770 /* div - div. */
4771 static void gen_div(DisasContext *ctx)
4773 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4774 cpu_gpr[rB(ctx->opcode)]);
4775 if (unlikely(Rc(ctx->opcode) != 0))
4776 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4779 /* divo - divo. */
4780 static void gen_divo(DisasContext *ctx)
4782 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4783 cpu_gpr[rB(ctx->opcode)]);
4784 if (unlikely(Rc(ctx->opcode) != 0))
4785 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4788 /* divs - divs. */
4789 static void gen_divs(DisasContext *ctx)
4791 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
4792 cpu_gpr[rB(ctx->opcode)]);
4793 if (unlikely(Rc(ctx->opcode) != 0))
4794 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4797 /* divso - divso. */
4798 static void gen_divso(DisasContext *ctx)
4800 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
4801 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4802 if (unlikely(Rc(ctx->opcode) != 0))
4803 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4806 /* doz - doz. */
4807 static void gen_doz(DisasContext *ctx)
4809 int l1 = gen_new_label();
4810 int l2 = gen_new_label();
4811 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4812 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4813 tcg_gen_br(l2);
4814 gen_set_label(l1);
4815 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4816 gen_set_label(l2);
4817 if (unlikely(Rc(ctx->opcode) != 0))
4818 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4821 /* dozo - dozo. */
4822 static void gen_dozo(DisasContext *ctx)
4824 int l1 = gen_new_label();
4825 int l2 = gen_new_label();
4826 TCGv t0 = tcg_temp_new();
4827 TCGv t1 = tcg_temp_new();
4828 TCGv t2 = tcg_temp_new();
4829 /* Start with XER OV disabled, the most likely case */
4830 tcg_gen_movi_tl(cpu_ov, 0);
4831 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], l1);
4832 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4833 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4834 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
4835 tcg_gen_andc_tl(t1, t1, t2);
4836 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
4837 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
4838 tcg_gen_movi_tl(cpu_ov, 1);
4839 tcg_gen_movi_tl(cpu_so, 1);
4840 tcg_gen_br(l2);
4841 gen_set_label(l1);
4842 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4843 gen_set_label(l2);
4844 tcg_temp_free(t0);
4845 tcg_temp_free(t1);
4846 tcg_temp_free(t2);
4847 if (unlikely(Rc(ctx->opcode) != 0))
4848 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4851 /* dozi */
4852 static void gen_dozi(DisasContext *ctx)
4854 target_long simm = SIMM(ctx->opcode);
4855 int l1 = gen_new_label();
4856 int l2 = gen_new_label();
4857 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
4858 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
4859 tcg_gen_br(l2);
4860 gen_set_label(l1);
4861 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
4862 gen_set_label(l2);
4863 if (unlikely(Rc(ctx->opcode) != 0))
4864 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4867 /* lscbx - lscbx. */
4868 static void gen_lscbx(DisasContext *ctx)
4870 TCGv t0 = tcg_temp_new();
4871 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
4872 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
4873 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
4875 gen_addr_reg_index(ctx, t0);
4876 /* NIP cannot be restored if the memory exception comes from a helper */
4877 gen_update_nip(ctx, ctx->nip - 4);
4878 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
4879 tcg_temp_free_i32(t1);
4880 tcg_temp_free_i32(t2);
4881 tcg_temp_free_i32(t3);
4882 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
4883 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
4884 if (unlikely(Rc(ctx->opcode) != 0))
4885 gen_set_Rc0(ctx, t0);
4886 tcg_temp_free(t0);
4889 /* maskg - maskg. */
4890 static void gen_maskg(DisasContext *ctx)
4892 int l1 = gen_new_label();
4893 TCGv t0 = tcg_temp_new();
4894 TCGv t1 = tcg_temp_new();
4895 TCGv t2 = tcg_temp_new();
4896 TCGv t3 = tcg_temp_new();
4897 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
4898 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
4899 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
4900 tcg_gen_addi_tl(t2, t0, 1);
4901 tcg_gen_shr_tl(t2, t3, t2);
4902 tcg_gen_shr_tl(t3, t3, t1);
4903 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
4904 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
4905 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4906 gen_set_label(l1);
4907 tcg_temp_free(t0);
4908 tcg_temp_free(t1);
4909 tcg_temp_free(t2);
4910 tcg_temp_free(t3);
4911 if (unlikely(Rc(ctx->opcode) != 0))
4912 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4915 /* maskir - maskir. */
4916 static void gen_maskir(DisasContext *ctx)
4918 TCGv t0 = tcg_temp_new();
4919 TCGv t1 = tcg_temp_new();
4920 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4921 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
4922 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
4923 tcg_temp_free(t0);
4924 tcg_temp_free(t1);
4925 if (unlikely(Rc(ctx->opcode) != 0))
4926 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
4929 /* mul - mul. */
4930 static void gen_mul(DisasContext *ctx)
4932 TCGv_i64 t0 = tcg_temp_new_i64();
4933 TCGv_i64 t1 = tcg_temp_new_i64();
4934 TCGv t2 = tcg_temp_new();
4935 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4936 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4937 tcg_gen_mul_i64(t0, t0, t1);
4938 tcg_gen_trunc_i64_tl(t2, t0);
4939 gen_store_spr(SPR_MQ, t2);
4940 tcg_gen_shri_i64(t1, t0, 32);
4941 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4942 tcg_temp_free_i64(t0);
4943 tcg_temp_free_i64(t1);
4944 tcg_temp_free(t2);
4945 if (unlikely(Rc(ctx->opcode) != 0))
4946 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4949 /* mulo - mulo. */
4950 static void gen_mulo(DisasContext *ctx)
4952 int l1 = gen_new_label();
4953 TCGv_i64 t0 = tcg_temp_new_i64();
4954 TCGv_i64 t1 = tcg_temp_new_i64();
4955 TCGv t2 = tcg_temp_new();
4956 /* Start with XER OV disabled, the most likely case */
4957 tcg_gen_movi_tl(cpu_ov, 0);
4958 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
4959 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
4960 tcg_gen_mul_i64(t0, t0, t1);
4961 tcg_gen_trunc_i64_tl(t2, t0);
4962 gen_store_spr(SPR_MQ, t2);
4963 tcg_gen_shri_i64(t1, t0, 32);
4964 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
4965 tcg_gen_ext32s_i64(t1, t0);
4966 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
4967 tcg_gen_movi_tl(cpu_ov, 1);
4968 tcg_gen_movi_tl(cpu_so, 1);
4969 gen_set_label(l1);
4970 tcg_temp_free_i64(t0);
4971 tcg_temp_free_i64(t1);
4972 tcg_temp_free(t2);
4973 if (unlikely(Rc(ctx->opcode) != 0))
4974 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4977 /* nabs - nabs. */
4978 static void gen_nabs(DisasContext *ctx)
4980 int l1 = gen_new_label();
4981 int l2 = gen_new_label();
4982 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4983 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4984 tcg_gen_br(l2);
4985 gen_set_label(l1);
4986 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4987 gen_set_label(l2);
4988 if (unlikely(Rc(ctx->opcode) != 0))
4989 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
4992 /* nabso - nabso. */
4993 static void gen_nabso(DisasContext *ctx)
4995 int l1 = gen_new_label();
4996 int l2 = gen_new_label();
4997 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
4998 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
4999 tcg_gen_br(l2);
5000 gen_set_label(l1);
5001 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5002 gen_set_label(l2);
5003 /* nabs never overflows */
5004 tcg_gen_movi_tl(cpu_ov, 0);
5005 if (unlikely(Rc(ctx->opcode) != 0))
5006 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5009 /* rlmi - rlmi. */
5010 static void gen_rlmi(DisasContext *ctx)
5012 uint32_t mb = MB(ctx->opcode);
5013 uint32_t me = ME(ctx->opcode);
5014 TCGv t0 = tcg_temp_new();
5015 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5016 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5017 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
5018 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~MASK(mb, me));
5019 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
5020 tcg_temp_free(t0);
5021 if (unlikely(Rc(ctx->opcode) != 0))
5022 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5025 /* rrib - rrib. */
5026 static void gen_rrib(DisasContext *ctx)
5028 TCGv t0 = tcg_temp_new();
5029 TCGv t1 = tcg_temp_new();
5030 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5031 tcg_gen_movi_tl(t1, 0x80000000);
5032 tcg_gen_shr_tl(t1, t1, t0);
5033 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5034 tcg_gen_and_tl(t0, t0, t1);
5035 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
5036 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5037 tcg_temp_free(t0);
5038 tcg_temp_free(t1);
5039 if (unlikely(Rc(ctx->opcode) != 0))
5040 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5043 /* sle - sle. */
5044 static void gen_sle(DisasContext *ctx)
5046 TCGv t0 = tcg_temp_new();
5047 TCGv t1 = tcg_temp_new();
5048 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5049 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5050 tcg_gen_subfi_tl(t1, 32, t1);
5051 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5052 tcg_gen_or_tl(t1, t0, t1);
5053 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5054 gen_store_spr(SPR_MQ, t1);
5055 tcg_temp_free(t0);
5056 tcg_temp_free(t1);
5057 if (unlikely(Rc(ctx->opcode) != 0))
5058 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5061 /* sleq - sleq. */
5062 static void gen_sleq(DisasContext *ctx)
5064 TCGv t0 = tcg_temp_new();
5065 TCGv t1 = tcg_temp_new();
5066 TCGv t2 = tcg_temp_new();
5067 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5068 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
5069 tcg_gen_shl_tl(t2, t2, t0);
5070 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5071 gen_load_spr(t1, SPR_MQ);
5072 gen_store_spr(SPR_MQ, t0);
5073 tcg_gen_and_tl(t0, t0, t2);
5074 tcg_gen_andc_tl(t1, t1, t2);
5075 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5076 tcg_temp_free(t0);
5077 tcg_temp_free(t1);
5078 tcg_temp_free(t2);
5079 if (unlikely(Rc(ctx->opcode) != 0))
5080 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5083 /* sliq - sliq. */
5084 static void gen_sliq(DisasContext *ctx)
5086 int sh = SH(ctx->opcode);
5087 TCGv t0 = tcg_temp_new();
5088 TCGv t1 = tcg_temp_new();
5089 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5090 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5091 tcg_gen_or_tl(t1, t0, t1);
5092 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5093 gen_store_spr(SPR_MQ, t1);
5094 tcg_temp_free(t0);
5095 tcg_temp_free(t1);
5096 if (unlikely(Rc(ctx->opcode) != 0))
5097 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5100 /* slliq - slliq. */
5101 static void gen_slliq(DisasContext *ctx)
5103 int sh = SH(ctx->opcode);
5104 TCGv t0 = tcg_temp_new();
5105 TCGv t1 = tcg_temp_new();
5106 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5107 gen_load_spr(t1, SPR_MQ);
5108 gen_store_spr(SPR_MQ, t0);
5109 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
5110 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
5111 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5112 tcg_temp_free(t0);
5113 tcg_temp_free(t1);
5114 if (unlikely(Rc(ctx->opcode) != 0))
5115 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5118 /* sllq - sllq. */
5119 static void gen_sllq(DisasContext *ctx)
5121 int l1 = gen_new_label();
5122 int l2 = gen_new_label();
5123 TCGv t0 = tcg_temp_local_new();
5124 TCGv t1 = tcg_temp_local_new();
5125 TCGv t2 = tcg_temp_local_new();
5126 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5127 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5128 tcg_gen_shl_tl(t1, t1, t2);
5129 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5130 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5131 gen_load_spr(t0, SPR_MQ);
5132 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5133 tcg_gen_br(l2);
5134 gen_set_label(l1);
5135 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5136 gen_load_spr(t2, SPR_MQ);
5137 tcg_gen_andc_tl(t1, t2, t1);
5138 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5139 gen_set_label(l2);
5140 tcg_temp_free(t0);
5141 tcg_temp_free(t1);
5142 tcg_temp_free(t2);
5143 if (unlikely(Rc(ctx->opcode) != 0))
5144 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5147 /* slq - slq. */
5148 static void gen_slq(DisasContext *ctx)
5150 int l1 = gen_new_label();
5151 TCGv t0 = tcg_temp_new();
5152 TCGv t1 = tcg_temp_new();
5153 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5154 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5155 tcg_gen_subfi_tl(t1, 32, t1);
5156 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5157 tcg_gen_or_tl(t1, t0, t1);
5158 gen_store_spr(SPR_MQ, t1);
5159 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5160 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5161 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
5162 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5163 gen_set_label(l1);
5164 tcg_temp_free(t0);
5165 tcg_temp_free(t1);
5166 if (unlikely(Rc(ctx->opcode) != 0))
5167 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5170 /* sraiq - sraiq. */
5171 static void gen_sraiq(DisasContext *ctx)
5173 int sh = SH(ctx->opcode);
5174 int l1 = gen_new_label();
5175 TCGv t0 = tcg_temp_new();
5176 TCGv t1 = tcg_temp_new();
5177 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5178 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5179 tcg_gen_or_tl(t0, t0, t1);
5180 gen_store_spr(SPR_MQ, t0);
5181 tcg_gen_movi_tl(cpu_ca, 0);
5182 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
5183 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
5184 tcg_gen_movi_tl(cpu_ca, 1);
5185 gen_set_label(l1);
5186 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
5187 tcg_temp_free(t0);
5188 tcg_temp_free(t1);
5189 if (unlikely(Rc(ctx->opcode) != 0))
5190 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5193 /* sraq - sraq. */
5194 static void gen_sraq(DisasContext *ctx)
5196 int l1 = gen_new_label();
5197 int l2 = gen_new_label();
5198 TCGv t0 = tcg_temp_new();
5199 TCGv t1 = tcg_temp_local_new();
5200 TCGv t2 = tcg_temp_local_new();
5201 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5202 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5203 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
5204 tcg_gen_subfi_tl(t2, 32, t2);
5205 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
5206 tcg_gen_or_tl(t0, t0, t2);
5207 gen_store_spr(SPR_MQ, t0);
5208 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5209 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
5210 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
5211 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
5212 gen_set_label(l1);
5213 tcg_temp_free(t0);
5214 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
5215 tcg_gen_movi_tl(cpu_ca, 0);
5216 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
5217 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
5218 tcg_gen_movi_tl(cpu_ca, 1);
5219 gen_set_label(l2);
5220 tcg_temp_free(t1);
5221 tcg_temp_free(t2);
5222 if (unlikely(Rc(ctx->opcode) != 0))
5223 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5226 /* sre - sre. */
5227 static void gen_sre(DisasContext *ctx)
5229 TCGv t0 = tcg_temp_new();
5230 TCGv t1 = tcg_temp_new();
5231 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5232 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5233 tcg_gen_subfi_tl(t1, 32, t1);
5234 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5235 tcg_gen_or_tl(t1, t0, t1);
5236 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5237 gen_store_spr(SPR_MQ, t1);
5238 tcg_temp_free(t0);
5239 tcg_temp_free(t1);
5240 if (unlikely(Rc(ctx->opcode) != 0))
5241 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5244 /* srea - srea. */
5245 static void gen_srea(DisasContext *ctx)
5247 TCGv t0 = tcg_temp_new();
5248 TCGv t1 = tcg_temp_new();
5249 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5250 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5251 gen_store_spr(SPR_MQ, t0);
5252 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
5253 tcg_temp_free(t0);
5254 tcg_temp_free(t1);
5255 if (unlikely(Rc(ctx->opcode) != 0))
5256 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5259 /* sreq */
5260 static void gen_sreq(DisasContext *ctx)
5262 TCGv t0 = tcg_temp_new();
5263 TCGv t1 = tcg_temp_new();
5264 TCGv t2 = tcg_temp_new();
5265 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
5266 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5267 tcg_gen_shr_tl(t1, t1, t0);
5268 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
5269 gen_load_spr(t2, SPR_MQ);
5270 gen_store_spr(SPR_MQ, t0);
5271 tcg_gen_and_tl(t0, t0, t1);
5272 tcg_gen_andc_tl(t2, t2, t1);
5273 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5274 tcg_temp_free(t0);
5275 tcg_temp_free(t1);
5276 tcg_temp_free(t2);
5277 if (unlikely(Rc(ctx->opcode) != 0))
5278 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5281 /* sriq */
5282 static void gen_sriq(DisasContext *ctx)
5284 int sh = SH(ctx->opcode);
5285 TCGv t0 = tcg_temp_new();
5286 TCGv t1 = tcg_temp_new();
5287 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5288 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
5289 tcg_gen_or_tl(t1, t0, t1);
5290 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5291 gen_store_spr(SPR_MQ, t1);
5292 tcg_temp_free(t0);
5293 tcg_temp_free(t1);
5294 if (unlikely(Rc(ctx->opcode) != 0))
5295 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5298 /* srliq */
5299 static void gen_srliq(DisasContext *ctx)
5301 int sh = SH(ctx->opcode);
5302 TCGv t0 = tcg_temp_new();
5303 TCGv t1 = tcg_temp_new();
5304 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
5305 gen_load_spr(t1, SPR_MQ);
5306 gen_store_spr(SPR_MQ, t0);
5307 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
5308 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
5309 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5310 tcg_temp_free(t0);
5311 tcg_temp_free(t1);
5312 if (unlikely(Rc(ctx->opcode) != 0))
5313 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5316 /* srlq */
5317 static void gen_srlq(DisasContext *ctx)
5319 int l1 = gen_new_label();
5320 int l2 = gen_new_label();
5321 TCGv t0 = tcg_temp_local_new();
5322 TCGv t1 = tcg_temp_local_new();
5323 TCGv t2 = tcg_temp_local_new();
5324 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
5325 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
5326 tcg_gen_shr_tl(t2, t1, t2);
5327 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
5328 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5329 gen_load_spr(t0, SPR_MQ);
5330 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
5331 tcg_gen_br(l2);
5332 gen_set_label(l1);
5333 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
5334 tcg_gen_and_tl(t0, t0, t2);
5335 gen_load_spr(t1, SPR_MQ);
5336 tcg_gen_andc_tl(t1, t1, t2);
5337 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
5338 gen_set_label(l2);
5339 tcg_temp_free(t0);
5340 tcg_temp_free(t1);
5341 tcg_temp_free(t2);
5342 if (unlikely(Rc(ctx->opcode) != 0))
5343 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5346 /* srq */
5347 static void gen_srq(DisasContext *ctx)
5349 int l1 = gen_new_label();
5350 TCGv t0 = tcg_temp_new();
5351 TCGv t1 = tcg_temp_new();
5352 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
5353 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
5354 tcg_gen_subfi_tl(t1, 32, t1);
5355 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
5356 tcg_gen_or_tl(t1, t0, t1);
5357 gen_store_spr(SPR_MQ, t1);
5358 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
5359 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
5360 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
5361 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
5362 gen_set_label(l1);
5363 tcg_temp_free(t0);
5364 tcg_temp_free(t1);
5365 if (unlikely(Rc(ctx->opcode) != 0))
5366 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
5369 /* PowerPC 602 specific instructions */
5371 /* dsa */
5372 static void gen_dsa(DisasContext *ctx)
5374 /* XXX: TODO */
5375 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5378 /* esa */
5379 static void gen_esa(DisasContext *ctx)
5381 /* XXX: TODO */
5382 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5385 /* mfrom */
5386 static void gen_mfrom(DisasContext *ctx)
5388 #if defined(CONFIG_USER_ONLY)
5389 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5390 #else
5391 if (unlikely(!ctx->mem_idx)) {
5392 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5393 return;
5395 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
5396 #endif
5399 /* 602 - 603 - G2 TLB management */
5401 /* tlbld */
5402 static void gen_tlbld_6xx(DisasContext *ctx)
5404 #if defined(CONFIG_USER_ONLY)
5405 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5406 #else
5407 if (unlikely(!ctx->mem_idx)) {
5408 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5409 return;
5411 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5412 #endif
5415 /* tlbli */
5416 static void gen_tlbli_6xx(DisasContext *ctx)
5418 #if defined(CONFIG_USER_ONLY)
5419 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5420 #else
5421 if (unlikely(!ctx->mem_idx)) {
5422 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5423 return;
5425 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5426 #endif
5429 /* 74xx TLB management */
5431 /* tlbld */
5432 static void gen_tlbld_74xx(DisasContext *ctx)
5434 #if defined(CONFIG_USER_ONLY)
5435 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5436 #else
5437 if (unlikely(!ctx->mem_idx)) {
5438 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5439 return;
5441 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5442 #endif
5445 /* tlbli */
5446 static void gen_tlbli_74xx(DisasContext *ctx)
5448 #if defined(CONFIG_USER_ONLY)
5449 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5450 #else
5451 if (unlikely(!ctx->mem_idx)) {
5452 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5453 return;
5455 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5456 #endif
5459 /* POWER instructions not in PowerPC 601 */
5461 /* clf */
5462 static void gen_clf(DisasContext *ctx)
5464 /* Cache line flush: implemented as no-op */
5467 /* cli */
5468 static void gen_cli(DisasContext *ctx)
5470 /* Cache line invalidate: privileged and treated as no-op */
5471 #if defined(CONFIG_USER_ONLY)
5472 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5473 #else
5474 if (unlikely(!ctx->mem_idx)) {
5475 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5476 return;
5478 #endif
5481 /* dclst */
5482 static void gen_dclst(DisasContext *ctx)
5484 /* Data cache line store: treated as no-op */
5487 static void gen_mfsri(DisasContext *ctx)
5489 #if defined(CONFIG_USER_ONLY)
5490 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5491 #else
5492 int ra = rA(ctx->opcode);
5493 int rd = rD(ctx->opcode);
5494 TCGv t0;
5495 if (unlikely(!ctx->mem_idx)) {
5496 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5497 return;
5499 t0 = tcg_temp_new();
5500 gen_addr_reg_index(ctx, t0);
5501 tcg_gen_shri_tl(t0, t0, 28);
5502 tcg_gen_andi_tl(t0, t0, 0xF);
5503 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
5504 tcg_temp_free(t0);
5505 if (ra != 0 && ra != rd)
5506 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
5507 #endif
5510 static void gen_rac(DisasContext *ctx)
5512 #if defined(CONFIG_USER_ONLY)
5513 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5514 #else
5515 TCGv t0;
5516 if (unlikely(!ctx->mem_idx)) {
5517 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5518 return;
5520 t0 = tcg_temp_new();
5521 gen_addr_reg_index(ctx, t0);
5522 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5523 tcg_temp_free(t0);
5524 #endif
5527 static void gen_rfsvc(DisasContext *ctx)
5529 #if defined(CONFIG_USER_ONLY)
5530 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5531 #else
5532 if (unlikely(!ctx->mem_idx)) {
5533 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5534 return;
5536 gen_helper_rfsvc(cpu_env);
5537 gen_sync_exception(ctx);
5538 #endif
5541 /* svc is not implemented for now */
5543 /* POWER2 specific instructions */
5544 /* Quad manipulation (load/store two floats at a time) */
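/* lfq/stfq and their variants move the FPR pair FRT and (FRT + 1) mod 32
 * to or from EA and EA + 8; the update forms additionally write the
 * starting EA back into rA when rA is non-zero.
 */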
5546 /* lfq */
5547 static void gen_lfq(DisasContext *ctx)
5549 int rd = rD(ctx->opcode);
5550 TCGv t0;
5551 gen_set_access_type(ctx, ACCESS_FLOAT);
5552 t0 = tcg_temp_new();
5553 gen_addr_imm_index(ctx, t0, 0);
5554 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5555 gen_addr_add(ctx, t0, t0, 8);
5556 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5557 tcg_temp_free(t0);
5560 /* lfqu */
5561 static void gen_lfqu(DisasContext *ctx)
5563 int ra = rA(ctx->opcode);
5564 int rd = rD(ctx->opcode);
5565 TCGv t0, t1;
5566 gen_set_access_type(ctx, ACCESS_FLOAT);
5567 t0 = tcg_temp_new();
5568 t1 = tcg_temp_new();
5569 gen_addr_imm_index(ctx, t0, 0);
5570 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5571 gen_addr_add(ctx, t1, t0, 8);
5572 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5573 if (ra != 0)
5574 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5575 tcg_temp_free(t0);
5576 tcg_temp_free(t1);
5579 /* lfqux */
5580 static void gen_lfqux(DisasContext *ctx)
5582 int ra = rA(ctx->opcode);
5583 int rd = rD(ctx->opcode);
5584     TCGv t0, t1;
5585     gen_set_access_type(ctx, ACCESS_FLOAT);
5586 t0 = tcg_temp_new();
5587 gen_addr_reg_index(ctx, t0);
5588 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5589 t1 = tcg_temp_new();
5590 gen_addr_add(ctx, t1, t0, 8);
5591 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5592 tcg_temp_free(t1);
5593 if (ra != 0)
5594 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5595 tcg_temp_free(t0);
5598 /* lfqx */
5599 static void gen_lfqx(DisasContext *ctx)
5601 int rd = rD(ctx->opcode);
5602 TCGv t0;
5603 gen_set_access_type(ctx, ACCESS_FLOAT);
5604 t0 = tcg_temp_new();
5605 gen_addr_reg_index(ctx, t0);
5606 gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
5607 gen_addr_add(ctx, t0, t0, 8);
5608 gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5609 tcg_temp_free(t0);
5612 /* stfq */
5613 static void gen_stfq(DisasContext *ctx)
5615 int rd = rD(ctx->opcode);
5616 TCGv t0;
5617 gen_set_access_type(ctx, ACCESS_FLOAT);
5618 t0 = tcg_temp_new();
5619 gen_addr_imm_index(ctx, t0, 0);
5620 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5621 gen_addr_add(ctx, t0, t0, 8);
5622 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5623 tcg_temp_free(t0);
5626 /* stfqu */
5627 static void gen_stfqu(DisasContext *ctx)
5629 int ra = rA(ctx->opcode);
5630 int rd = rD(ctx->opcode);
5631 TCGv t0, t1;
5632 gen_set_access_type(ctx, ACCESS_FLOAT);
5633 t0 = tcg_temp_new();
5634 gen_addr_imm_index(ctx, t0, 0);
5635 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5636 t1 = tcg_temp_new();
5637 gen_addr_add(ctx, t1, t0, 8);
5638 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5639 tcg_temp_free(t1);
5640 if (ra != 0)
5641 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5642 tcg_temp_free(t0);
5645 /* stfqux */
5646 static void gen_stfqux(DisasContext *ctx)
5648 int ra = rA(ctx->opcode);
5649 int rd = rD(ctx->opcode);
5650 TCGv t0, t1;
5651 gen_set_access_type(ctx, ACCESS_FLOAT);
5652 t0 = tcg_temp_new();
5653 gen_addr_reg_index(ctx, t0);
5654 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5655 t1 = tcg_temp_new();
5656 gen_addr_add(ctx, t1, t0, 8);
5657 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
5658 tcg_temp_free(t1);
5659 if (ra != 0)
5660 tcg_gen_mov_tl(cpu_gpr[ra], t0);
5661 tcg_temp_free(t0);
5664 /* stfqx */
5665 static void gen_stfqx(DisasContext *ctx)
5667 int rd = rD(ctx->opcode);
5668 TCGv t0;
5669 gen_set_access_type(ctx, ACCESS_FLOAT);
5670 t0 = tcg_temp_new();
5671 gen_addr_reg_index(ctx, t0);
5672 gen_qemu_st64(ctx, cpu_fpr[rd], t0);
5673 gen_addr_add(ctx, t0, t0, 8);
5674 gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
5675 tcg_temp_free(t0);
5678 /* BookE specific instructions */
5680 /* XXX: not implemented on 440 ? */
5681 static void gen_mfapidi(DisasContext *ctx)
5683 /* XXX: TODO */
5684 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5687 /* XXX: not implemented on 440 ? */
5688 static void gen_tlbiva(DisasContext *ctx)
5690 #if defined(CONFIG_USER_ONLY)
5691 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5692 #else
5693 TCGv t0;
5694 if (unlikely(!ctx->mem_idx)) {
5695 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
5696 return;
5698 t0 = tcg_temp_new();
5699 gen_addr_reg_index(ctx, t0);
5700 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5701 tcg_temp_free(t0);
5702 #endif
5705 /* All 405 MAC instructions are translated here */
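/* Decoding scheme: opc3 selects which 16-bit halves of rA and rB are
 * multiplied and whether they are sign- or zero-extended; opc2 bit 0x04
 * turns the multiply into a multiply-accumulate on rT, with bit 0x02
 * selecting the negated (subtracting) variants; opc3 bits 0x10 and 0x02
 * enable overflow detection and saturation respectively.
 */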
5706 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5707 int ra, int rb, int rt, int Rc)
5709 TCGv t0, t1;
5711 t0 = tcg_temp_local_new();
5712 t1 = tcg_temp_local_new();
5714 switch (opc3 & 0x0D) {
5715 case 0x05:
5716 /* macchw - macchw. - macchwo - macchwo. */
5717 /* macchws - macchws. - macchwso - macchwso. */
5718 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5719 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5720 /* mulchw - mulchw. */
5721 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5722 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5723 tcg_gen_ext16s_tl(t1, t1);
5724 break;
5725 case 0x04:
5726 /* macchwu - macchwu. - macchwuo - macchwuo. */
5727 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5728 /* mulchwu - mulchwu. */
5729 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5730 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5731 tcg_gen_ext16u_tl(t1, t1);
5732 break;
5733 case 0x01:
5734 /* machhw - machhw. - machhwo - machhwo. */
5735 /* machhws - machhws. - machhwso - machhwso. */
5736 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5737 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5738 /* mulhhw - mulhhw. */
5739 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5740 tcg_gen_ext16s_tl(t0, t0);
5741 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5742 tcg_gen_ext16s_tl(t1, t1);
5743 break;
5744 case 0x00:
5745 /* machhwu - machhwu. - machhwuo - machhwuo. */
5746 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5747 /* mulhhwu - mulhhwu. */
5748 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5749 tcg_gen_ext16u_tl(t0, t0);
5750 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5751 tcg_gen_ext16u_tl(t1, t1);
5752 break;
5753 case 0x0D:
5754 /* maclhw - maclhw. - maclhwo - maclhwo. */
5755 /* maclhws - maclhws. - maclhwso - maclhwso. */
5756 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5757 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5758 /* mullhw - mullhw. */
5759 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5760 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5761 break;
5762 case 0x0C:
5763 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5764 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5765 /* mullhwu - mullhwu. */
5766 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5767 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5768 break;
5770 if (opc2 & 0x04) {
5771 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5772 tcg_gen_mul_tl(t1, t0, t1);
5773 if (opc2 & 0x02) {
5774 /* nmultiply-and-accumulate (0x0E) */
5775 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5776 } else {
5777 /* multiply-and-accumulate (0x0C) */
5778 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5781 if (opc3 & 0x12) {
5782 /* Check overflow and/or saturate */
5783 int l1 = gen_new_label();
5785 if (opc3 & 0x10) {
5786 /* Start with XER OV disabled, the most likely case */
5787 tcg_gen_movi_tl(cpu_ov, 0);
5789 if (opc3 & 0x01) {
5790 /* Signed */
5791 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5792 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5793 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5794 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5795 if (opc3 & 0x02) {
5796 /* Saturate */
5797 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5798 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5800 } else {
5801 /* Unsigned */
5802 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5803 if (opc3 & 0x02) {
5804 /* Saturate */
5805 tcg_gen_movi_tl(t0, UINT32_MAX);
5808 if (opc3 & 0x10) {
5809 /* Check overflow */
5810 tcg_gen_movi_tl(cpu_ov, 1);
5811 tcg_gen_movi_tl(cpu_so, 1);
5813 gen_set_label(l1);
5814 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5816 } else {
5817 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5819 tcg_temp_free(t0);
5820 tcg_temp_free(t1);
5821 if (unlikely(Rc != 0)) {
5822 /* Update Rc0 */
5823 gen_set_Rc0(ctx, cpu_gpr[rt]);
5827 #define GEN_MAC_HANDLER(name, opc2, opc3) \
5828 static void glue(gen_, name)(DisasContext *ctx) \
5830 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
5831 rD(ctx->opcode), Rc(ctx->opcode)); \
5834 /* macchw - macchw. */
5835 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5836 /* macchwo - macchwo. */
5837 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5838 /* macchws - macchws. */
5839 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5840 /* macchwso - macchwso. */
5841 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5842 /* macchwsu - macchwsu. */
5843 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5844 /* macchwsuo - macchwsuo. */
5845 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5846 /* macchwu - macchwu. */
5847 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5848 /* macchwuo - macchwuo. */
5849 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5850 /* machhw - machhw. */
5851 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5852 /* machhwo - machhwo. */
5853 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5854 /* machhws - machhws. */
5855 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5856 /* machhwso - machhwso. */
5857 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5858 /* machhwsu - machhwsu. */
5859 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5860 /* machhwsuo - machhwsuo. */
5861 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5862 /* machhwu - machhwu. */
5863 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5864 /* machhwuo - machhwuo. */
5865 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5866 /* maclhw - maclhw. */
5867 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5868 /* maclhwo - maclhwo. */
5869 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5870 /* maclhws - maclhws. */
5871 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5872 /* maclhwso - maclhwso. */
5873 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5874 /* maclhwu - maclhwu. */
5875 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5876 /* maclhwuo - maclhwuo. */
5877 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5878 /* maclhwsu - maclhwsu. */
5879 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5880 /* maclhwsuo - maclhwsuo. */
5881 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5882 /* nmacchw - nmacchw. */
5883 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5884 /* nmacchwo - nmacchwo. */
5885 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5886 /* nmacchws - nmacchws. */
5887 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5888 /* nmacchwso - nmacchwso. */
5889 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5890 /* nmachhw - nmachhw. */
5891 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5892 /* nmachhwo - nmachhwo. */
5893 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5894 /* nmachhws - nmachhws. */
5895 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5896 /* nmachhwso - nmachhwso. */
5897 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5898 /* nmaclhw - nmaclhw. */
5899 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5900 /* nmaclhwo - nmaclhwo. */
5901 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5902 /* nmaclhws - nmaclhws. */
5903 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5904 /* nmaclhwso - nmaclhwso. */
5905 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5907 /* mulchw - mulchw. */
5908 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5909 /* mulchwu - mulchwu. */
5910 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5911 /* mulhhw - mulhhw. */
5912 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5913 /* mulhhwu - mulhhwu. */
5914 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5915 /* mullhw - mullhw. */
5916 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5917 /* mullhwu - mullhwu. */
5918 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
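/* Device control registers: mfdcr/mtdcr take the DCR number from the SPR
 * field of the opcode, mfdcrx/mtdcrx take it from rA, and the 460-only
 * mfdcrux/mtdcrux forms allow user-mode access.  All of them go through
 * the load_dcr/store_dcr helpers, so NIP is updated first.
 */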
5920 /* mfdcr */
5921 static void gen_mfdcr(DisasContext *ctx)
5923 #if defined(CONFIG_USER_ONLY)
5924 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5925 #else
5926 TCGv dcrn;
5927 if (unlikely(!ctx->mem_idx)) {
5928 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5929 return;
5931 /* NIP cannot be restored if the memory exception comes from a helper */
5932 gen_update_nip(ctx, ctx->nip - 4);
5933 dcrn = tcg_const_tl(SPR(ctx->opcode));
5934 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5935 tcg_temp_free(dcrn);
5936 #endif
5939 /* mtdcr */
5940 static void gen_mtdcr(DisasContext *ctx)
5942 #if defined(CONFIG_USER_ONLY)
5943 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5944 #else
5945 TCGv dcrn;
5946 if (unlikely(!ctx->mem_idx)) {
5947 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5948 return;
5950 /* NIP cannot be restored if the memory exception comes from a helper */
5951 gen_update_nip(ctx, ctx->nip - 4);
5952 dcrn = tcg_const_tl(SPR(ctx->opcode));
5953 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5954 tcg_temp_free(dcrn);
5955 #endif
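/* Descriptive note on the two DCR handlers above (sketch, inferred from the
 * code): the DCR number comes from the SPR field of the opcode, ctx->mem_idx
 * doubles as the privilege check (zero is treated as problem state), and NIP
 * is synchronised before the helper call so a faulting DCR access reports
 * the correct instruction address:
 *
 *   gen_update_nip(ctx, ctx->nip - 4);
 *   dcrn = tcg_const_tl(SPR(ctx->opcode));
 *   gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
 */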
5958 /* mfdcrx */
5959 /* XXX: not implemented on 440 ? */
5960 static void gen_mfdcrx(DisasContext *ctx)
5962 #if defined(CONFIG_USER_ONLY)
5963 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5964 #else
5965 if (unlikely(!ctx->mem_idx)) {
5966 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5967 return;
5969 /* NIP cannot be restored if the memory exception comes from a helper */
5970 gen_update_nip(ctx, ctx->nip - 4);
5971 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5972 cpu_gpr[rA(ctx->opcode)]);
5973 /* Note: if the Rc update flag is set, the state of Rc0 is undefined */
5974 #endif
5977 /* mtdcrx */
5978 /* XXX: not implemented on 440 ? */
5979 static void gen_mtdcrx(DisasContext *ctx)
5981 #if defined(CONFIG_USER_ONLY)
5982 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5983 #else
5984 if (unlikely(!ctx->mem_idx)) {
5985 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5986 return;
5988 /* NIP cannot be restored if the memory exception comes from a helper */
5989 gen_update_nip(ctx, ctx->nip - 4);
5990 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5991 cpu_gpr[rS(ctx->opcode)]);
5992 /* Note: if the Rc update flag is set, the state of Rc0 is undefined */
5993 #endif
5996 /* mfdcrux (PPC 460): user-mode access to DCR */
5997 static void gen_mfdcrux(DisasContext *ctx)
5999 /* NIP cannot be restored if the memory exception comes from a helper */
6000 gen_update_nip(ctx, ctx->nip - 4);
6001 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
6002 cpu_gpr[rA(ctx->opcode)]);
6003 /* Note: if the Rc update flag is set, the state of Rc0 is undefined */
6006 /* mtdcrux (PPC 460): user-mode access to DCR */
6007 static void gen_mtdcrux(DisasContext *ctx)
6009 /* NIP cannot be restored if the memory exception comes from a helper */
6010 gen_update_nip(ctx, ctx->nip - 4);
6011 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
6012 cpu_gpr[rS(ctx->opcode)]);
6013 /* Note: if the Rc update flag is set, the state of Rc0 is undefined */
6016 /* dccci */
6017 static void gen_dccci(DisasContext *ctx)
6019 #if defined(CONFIG_USER_ONLY)
6020 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6021 #else
6022 if (unlikely(!ctx->mem_idx)) {
6023 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6024 return;
6026 /* interpreted as no-op */
6027 #endif
6030 /* dcread */
6031 static void gen_dcread(DisasContext *ctx)
6033 #if defined(CONFIG_USER_ONLY)
6034 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6035 #else
6036 TCGv EA, val;
6037 if (unlikely(!ctx->mem_idx)) {
6038 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6039 return;
6041 gen_set_access_type(ctx, ACCESS_CACHE);
6042 EA = tcg_temp_new();
6043 gen_addr_reg_index(ctx, EA);
6044 val = tcg_temp_new();
6045 gen_qemu_ld32u(ctx, val, EA);
6046 tcg_temp_free(val);
6047 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
6048 tcg_temp_free(EA);
6049 #endif
6052 /* icbt */
6053 static void gen_icbt_40x(DisasContext *ctx)
6055 /* interpreted as no-op */
6056 /* XXX: the specification says this is treated as a load by the MMU
6057 * but does not generate any exception */
6061 /* iccci */
6062 static void gen_iccci(DisasContext *ctx)
6064 #if defined(CONFIG_USER_ONLY)
6065 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6066 #else
6067 if (unlikely(!ctx->mem_idx)) {
6068 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6069 return;
6071 /* interpreted as no-op */
6072 #endif
6075 /* icread */
6076 static void gen_icread(DisasContext *ctx)
6078 #if defined(CONFIG_USER_ONLY)
6079 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6080 #else
6081 if (unlikely(!ctx->mem_idx)) {
6082 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6083 return;
6085 /* interpreted as no-op */
6086 #endif
6089 /* rfci (mem_idx only) */
6090 static void gen_rfci_40x(DisasContext *ctx)
6092 #if defined(CONFIG_USER_ONLY)
6093 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6094 #else
6095 if (unlikely(!ctx->mem_idx)) {
6096 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6097 return;
6099 /* Restore CPU state */
6100 gen_helper_40x_rfci(cpu_env);
6101 gen_sync_exception(ctx);
6102 #endif
6105 static void gen_rfci(DisasContext *ctx)
6107 #if defined(CONFIG_USER_ONLY)
6108 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6109 #else
6110 if (unlikely(!ctx->mem_idx)) {
6111 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6112 return;
6114 /* Restore CPU state */
6115 gen_helper_rfci(cpu_env);
6116 gen_sync_exception(ctx);
6117 #endif
6120 /* BookE specific */
6122 /* XXX: not implemented on 440 ? */
6123 static void gen_rfdi(DisasContext *ctx)
6125 #if defined(CONFIG_USER_ONLY)
6126 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6127 #else
6128 if (unlikely(!ctx->mem_idx)) {
6129 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6130 return;
6132 /* Restore CPU state */
6133 gen_helper_rfdi(cpu_env);
6134 gen_sync_exception(ctx);
6135 #endif
6138 /* XXX: not implemented on 440 ? */
6139 static void gen_rfmci(DisasContext *ctx)
6141 #if defined(CONFIG_USER_ONLY)
6142 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6143 #else
6144 if (unlikely(!ctx->mem_idx)) {
6145 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6146 return;
6148 /* Restore CPU state */
6149 gen_helper_rfmci(cpu_env);
6150 gen_sync_exception(ctx);
6151 #endif
6154 /* TLB management - PowerPC 405 implementation */
6156 /* tlbre */
6157 static void gen_tlbre_40x(DisasContext *ctx)
6159 #if defined(CONFIG_USER_ONLY)
6160 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6161 #else
6162 if (unlikely(!ctx->mem_idx)) {
6163 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6164 return;
6166 switch (rB(ctx->opcode)) {
6167 case 0:
6168 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
6169 cpu_gpr[rA(ctx->opcode)]);
6170 break;
6171 case 1:
6172 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
6173 cpu_gpr[rA(ctx->opcode)]);
6174 break;
6175 default:
6176 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6177 break;
6179 #endif
6182 /* tlbsx - tlbsx. */
6183 static void gen_tlbsx_40x(DisasContext *ctx)
6185 #if defined(CONFIG_USER_ONLY)
6186 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6187 #else
6188 TCGv t0;
6189 if (unlikely(!ctx->mem_idx)) {
6190 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6191 return;
6193 t0 = tcg_temp_new();
6194 gen_addr_reg_index(ctx, t0);
6195 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6196 tcg_temp_free(t0);
6197 if (Rc(ctx->opcode)) {
6198 int l1 = gen_new_label();
6199 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6200 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6201 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6202 gen_set_label(l1);
6204 #endif
6207 /* tlbwe */
6208 static void gen_tlbwe_40x(DisasContext *ctx)
6210 #if defined(CONFIG_USER_ONLY)
6211 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6212 #else
6213 if (unlikely(!ctx->mem_idx)) {
6214 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6215 return;
6217 switch (rB(ctx->opcode)) {
6218 case 0:
6219 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
6220 cpu_gpr[rS(ctx->opcode)]);
6221 break;
6222 case 1:
6223 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
6224 cpu_gpr[rS(ctx->opcode)]);
6225 break;
6226 default:
6227 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6228 break;
6230 #endif
6233 /* TLB management - PowerPC 440 implementation */
6235 /* tlbre */
6236 static void gen_tlbre_440(DisasContext *ctx)
6238 #if defined(CONFIG_USER_ONLY)
6239 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6240 #else
6241 if (unlikely(!ctx->mem_idx)) {
6242 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6243 return;
6245 switch (rB(ctx->opcode)) {
6246 case 0:
6247 case 1:
6248 case 2:
6250 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6251 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
6252 t0, cpu_gpr[rA(ctx->opcode)]);
6253 tcg_temp_free_i32(t0);
6255 break;
6256 default:
6257 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6258 break;
6260 #endif
6263 /* tlbsx - tlbsx. */
6264 static void gen_tlbsx_440(DisasContext *ctx)
6266 #if defined(CONFIG_USER_ONLY)
6267 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6268 #else
6269 TCGv t0;
6270 if (unlikely(!ctx->mem_idx)) {
6271 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6272 return;
6274 t0 = tcg_temp_new();
6275 gen_addr_reg_index(ctx, t0);
6276 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6277 tcg_temp_free(t0);
6278 if (Rc(ctx->opcode)) {
6279 int l1 = gen_new_label();
6280 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6281 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6282 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6283 gen_set_label(l1);
6285 #endif
6288 /* tlbwe */
6289 static void gen_tlbwe_440(DisasContext *ctx)
6291 #if defined(CONFIG_USER_ONLY)
6292 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6293 #else
6294 if (unlikely(!ctx->mem_idx)) {
6295 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6296 return;
6298 switch (rB(ctx->opcode)) {
6299 case 0:
6300 case 1:
6301 case 2:
6303 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6304 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
6305 cpu_gpr[rS(ctx->opcode)]);
6306 tcg_temp_free_i32(t0);
6308 break;
6309 default:
6310 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6311 break;
6313 #endif
6316 /* TLB management - PowerPC BookE 2.06 implementation */
6318 /* tlbre */
6319 static void gen_tlbre_booke206(DisasContext *ctx)
6321 #if defined(CONFIG_USER_ONLY)
6322 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6323 #else
6324 if (unlikely(!ctx->mem_idx)) {
6325 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6326 return;
6329 gen_helper_booke206_tlbre(cpu_env);
6330 #endif
6333 /* tlbsx - tlbsx. */
6334 static void gen_tlbsx_booke206(DisasContext *ctx)
6336 #if defined(CONFIG_USER_ONLY)
6337 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6338 #else
6339 TCGv t0;
6340 if (unlikely(!ctx->mem_idx)) {
6341 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6342 return;
6345 if (rA(ctx->opcode)) {
6346 t0 = tcg_temp_new();
6347 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6348 } else {
6349 t0 = tcg_const_tl(0);
6352 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6353 gen_helper_booke206_tlbsx(cpu_env, t0);
6354 #endif
6357 /* tlbwe */
6358 static void gen_tlbwe_booke206(DisasContext *ctx)
6360 #if defined(CONFIG_USER_ONLY)
6361 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6362 #else
6363 if (unlikely(!ctx->mem_idx)) {
6364 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6365 return;
6367 gen_update_nip(ctx, ctx->nip - 4);
6368 gen_helper_booke206_tlbwe(cpu_env);
6369 #endif
6372 static void gen_tlbivax_booke206(DisasContext *ctx)
6374 #if defined(CONFIG_USER_ONLY)
6375 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6376 #else
6377 TCGv t0;
6378 if (unlikely(!ctx->mem_idx)) {
6379 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6380 return;
6383 t0 = tcg_temp_new();
6384 gen_addr_reg_index(ctx, t0);
6386 gen_helper_booke206_tlbivax(cpu_env, t0);
6387 #endif
6390 static void gen_tlbilx_booke206(DisasContext *ctx)
6392 #if defined(CONFIG_USER_ONLY)
6393 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6394 #else
6395 TCGv t0;
6396 if (unlikely(!ctx->mem_idx)) {
6397 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6398 return;
6401 t0 = tcg_temp_new();
6402 gen_addr_reg_index(ctx, t0);
6404 switch((ctx->opcode >> 21) & 0x3) {
6405 case 0:
6406 gen_helper_booke206_tlbilx0(cpu_env, t0);
6407 break;
6408 case 1:
6409 gen_helper_booke206_tlbilx1(cpu_env, t0);
6410 break;
6411 case 3:
6412 gen_helper_booke206_tlbilx3(cpu_env, t0);
6413 break;
6414 default:
6415 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6416 break;
6419 tcg_temp_free(t0);
6420 #endif
6424 /* wrtee */
6425 static void gen_wrtee(DisasContext *ctx)
6427 #if defined(CONFIG_USER_ONLY)
6428 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6429 #else
6430 TCGv t0;
6431 if (unlikely(!ctx->mem_idx)) {
6432 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6433 return;
6435 t0 = tcg_temp_new();
6436 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6437 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6438 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6439 tcg_temp_free(t0);
6440 /* Stop translation to have a chance to raise an exception
6441 * if we just set msr_ee to 1 */
6443 gen_stop_exception(ctx);
6444 #endif
6447 /* wrteei */
6448 static void gen_wrteei(DisasContext *ctx)
6450 #if defined(CONFIG_USER_ONLY)
6451 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6452 #else
6453 if (unlikely(!ctx->mem_idx)) {
6454 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6455 return;
6457 if (ctx->opcode & 0x00008000) {
6458 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6459 /* Stop translation to have a chance to raise an exception */
6460 gen_stop_exception(ctx);
6461 } else {
6462 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6464 #endif
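/* Sketch of the MSR[EE] update above: wrtee copies bit EE from the source
 * GPR, wrteei takes it from opcode bit 0x00008000; both reduce to clearing
 * the bit in MSR and OR-ing in the new value, e.g. for wrtee:
 *
 *   tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
 *   tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
 *   tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
 *
 * Translation then stops (unconditionally for wrtee, only when enabling for
 * wrteei) so a pending interrupt can be taken before the next instruction.
 */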
6467 /* PowerPC 440 specific instructions */
6469 /* dlmzb */
6470 static void gen_dlmzb(DisasContext *ctx)
6472 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6473 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6474 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6475 tcg_temp_free_i32(t0);
6478 /* mbar replaces eieio on 440 */
6479 static void gen_mbar(DisasContext *ctx)
6481 /* interpreted as no-op */
6484 /* msync replaces sync on 440 */
6485 static void gen_msync_4xx(DisasContext *ctx)
6487 /* interpreted as no-op */
6490 /* icbt */
6491 static void gen_icbt_440(DisasContext *ctx)
6493 /* interpreted as no-op */
6494 /* XXX: the specification says this is treated as a load by the MMU
6495 * but does not generate any exception */
6499 /* Embedded.Processor Control */
6501 static void gen_msgclr(DisasContext *ctx)
6503 #if defined(CONFIG_USER_ONLY)
6504 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6505 #else
6506 if (unlikely(ctx->mem_idx == 0)) {
6507 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6508 return;
6511 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6512 #endif
6515 static void gen_msgsnd(DisasContext *ctx)
6517 #if defined(CONFIG_USER_ONLY)
6518 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6519 #else
6520 if (unlikely(ctx->mem_idx == 0)) {
6521 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
6522 return;
6525 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
6526 #endif
6529 /*** Altivec vector extension ***/
6530 /* Altivec registers moves */
6532 static inline TCGv_ptr gen_avr_ptr(int reg)
6534 TCGv_ptr r = tcg_temp_new_ptr();
6535 tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
6536 return r;
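/* Typical use of gen_avr_ptr() in the Altivec handlers below (sketch):
 *
 *   TCGv_ptr ra = gen_avr_ptr(rA(ctx->opcode));
 *   TCGv_ptr rb = gen_avr_ptr(rB(ctx->opcode));
 *   TCGv_ptr rd = gen_avr_ptr(rD(ctx->opcode));
 *   gen_helper_vaddubm(rd, ra, rb);
 *   tcg_temp_free_ptr(ra);
 *   tcg_temp_free_ptr(rb);
 *   tcg_temp_free_ptr(rd);
 *
 * i.e. vector helpers receive pointers into env->avr[] rather than the
 * 64-bit TCG globals used by the scalar units.
 */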
6539 #define GEN_VR_LDX(name, opc2, opc3) \
6540 static void glue(gen_, name)(DisasContext *ctx) \
6542 TCGv EA; \
6543 if (unlikely(!ctx->altivec_enabled)) { \
6544 gen_exception(ctx, POWERPC_EXCP_VPU); \
6545 return; \
6547 gen_set_access_type(ctx, ACCESS_INT); \
6548 EA = tcg_temp_new(); \
6549 gen_addr_reg_index(ctx, EA); \
6550 tcg_gen_andi_tl(EA, EA, ~0xf); \
6551 if (ctx->le_mode) { \
6552 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6553 tcg_gen_addi_tl(EA, EA, 8); \
6554 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6555 } else { \
6556 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6557 tcg_gen_addi_tl(EA, EA, 8); \
6558 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6560 tcg_temp_free(EA); \
6563 #define GEN_VR_STX(name, opc2, opc3) \
6564 static void gen_st##name(DisasContext *ctx) \
6566 TCGv EA; \
6567 if (unlikely(!ctx->altivec_enabled)) { \
6568 gen_exception(ctx, POWERPC_EXCP_VPU); \
6569 return; \
6571 gen_set_access_type(ctx, ACCESS_INT); \
6572 EA = tcg_temp_new(); \
6573 gen_addr_reg_index(ctx, EA); \
6574 tcg_gen_andi_tl(EA, EA, ~0xf); \
6575 if (ctx->le_mode) { \
6576 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6577 tcg_gen_addi_tl(EA, EA, 8); \
6578 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6579 } else { \
6580 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6581 tcg_gen_addi_tl(EA, EA, 8); \
6582 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6584 tcg_temp_free(EA); \
6587 #define GEN_VR_LVE(name, opc2, opc3) \
6588 static void gen_lve##name(DisasContext *ctx) \
6590 TCGv EA; \
6591 TCGv_ptr rs; \
6592 if (unlikely(!ctx->altivec_enabled)) { \
6593 gen_exception(ctx, POWERPC_EXCP_VPU); \
6594 return; \
6596 gen_set_access_type(ctx, ACCESS_INT); \
6597 EA = tcg_temp_new(); \
6598 gen_addr_reg_index(ctx, EA); \
6599 rs = gen_avr_ptr(rS(ctx->opcode)); \
6600 gen_helper_lve##name(cpu_env, rs, EA); \
6601 tcg_temp_free(EA); \
6602 tcg_temp_free_ptr(rs); \
6605 #define GEN_VR_STVE(name, opc2, opc3) \
6606 static void gen_stve##name(DisasContext *ctx) \
6608 TCGv EA; \
6609 TCGv_ptr rs; \
6610 if (unlikely(!ctx->altivec_enabled)) { \
6611 gen_exception(ctx, POWERPC_EXCP_VPU); \
6612 return; \
6614 gen_set_access_type(ctx, ACCESS_INT); \
6615 EA = tcg_temp_new(); \
6616 gen_addr_reg_index(ctx, EA); \
6617 rs = gen_avr_ptr(rS(ctx->opcode)); \
6618 gen_helper_stve##name(cpu_env, rs, EA); \
6619 tcg_temp_free(EA); \
6620 tcg_temp_free_ptr(rs); \
6623 GEN_VR_LDX(lvx, 0x07, 0x03);
6624 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
6625 GEN_VR_LDX(lvxl, 0x07, 0x0B);
6627 GEN_VR_LVE(bx, 0x07, 0x00);
6628 GEN_VR_LVE(hx, 0x07, 0x01);
6629 GEN_VR_LVE(wx, 0x07, 0x02);
6631 GEN_VR_STX(svx, 0x07, 0x07);
6632 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
6633 GEN_VR_STX(svxl, 0x07, 0x0F);
6635 GEN_VR_STVE(bx, 0x07, 0x04);
6636 GEN_VR_STVE(hx, 0x07, 0x05);
6637 GEN_VR_STVE(wx, 0x07, 0x06);
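/* The lvx/stvx patterns above mask the low four bits of the effective
 * address (tcg_gen_andi_tl(EA, EA, ~0xf)) to force the architectural
 * 16-byte alignment, then transfer the two 64-bit halves of the AVR,
 * reversing their order when ctx->le_mode is set so that cpu_avrh/cpu_avrl
 * keep a fixed element layout regardless of guest endianness.
 */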
6639 static void gen_lvsl(DisasContext *ctx)
6641 TCGv_ptr rd;
6642 TCGv EA;
6643 if (unlikely(!ctx->altivec_enabled)) {
6644 gen_exception(ctx, POWERPC_EXCP_VPU);
6645 return;
6647 EA = tcg_temp_new();
6648 gen_addr_reg_index(ctx, EA);
6649 rd = gen_avr_ptr(rD(ctx->opcode));
6650 gen_helper_lvsl(rd, EA);
6651 tcg_temp_free(EA);
6652 tcg_temp_free_ptr(rd);
6655 static void gen_lvsr(DisasContext *ctx)
6657 TCGv_ptr rd;
6658 TCGv EA;
6659 if (unlikely(!ctx->altivec_enabled)) {
6660 gen_exception(ctx, POWERPC_EXCP_VPU);
6661 return;
6663 EA = tcg_temp_new();
6664 gen_addr_reg_index(ctx, EA);
6665 rd = gen_avr_ptr(rD(ctx->opcode));
6666 gen_helper_lvsr(rd, EA);
6667 tcg_temp_free(EA);
6668 tcg_temp_free_ptr(rd);
6671 static void gen_mfvscr(DisasContext *ctx)
6673 TCGv_i32 t;
6674 if (unlikely(!ctx->altivec_enabled)) {
6675 gen_exception(ctx, POWERPC_EXCP_VPU);
6676 return;
6678 tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
6679 t = tcg_temp_new_i32();
6680 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, vscr));
6681 tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
6682 tcg_temp_free_i32(t);
6685 static void gen_mtvscr(DisasContext *ctx)
6687 TCGv_ptr p;
6688 if (unlikely(!ctx->altivec_enabled)) {
6689 gen_exception(ctx, POWERPC_EXCP_VPU);
6690 return;
6692 p = gen_avr_ptr(rD(ctx->opcode));
6693 gen_helper_mtvscr(cpu_env, p);
6694 tcg_temp_free_ptr(p);
6697 /* Logical operations */
6698 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
6699 static void glue(gen_, name)(DisasContext *ctx) \
6701 if (unlikely(!ctx->altivec_enabled)) { \
6702 gen_exception(ctx, POWERPC_EXCP_VPU); \
6703 return; \
6705 tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
6706 tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
6709 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
6710 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
6711 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
6712 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
6713 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
6715 #define GEN_VXFORM(name, opc2, opc3) \
6716 static void glue(gen_, name)(DisasContext *ctx) \
6718 TCGv_ptr ra, rb, rd; \
6719 if (unlikely(!ctx->altivec_enabled)) { \
6720 gen_exception(ctx, POWERPC_EXCP_VPU); \
6721 return; \
6723 ra = gen_avr_ptr(rA(ctx->opcode)); \
6724 rb = gen_avr_ptr(rB(ctx->opcode)); \
6725 rd = gen_avr_ptr(rD(ctx->opcode)); \
6726 gen_helper_##name (rd, ra, rb); \
6727 tcg_temp_free_ptr(ra); \
6728 tcg_temp_free_ptr(rb); \
6729 tcg_temp_free_ptr(rd); \
6732 #define GEN_VXFORM_ENV(name, opc2, opc3) \
6733 static void glue(gen_, name)(DisasContext *ctx) \
6735 TCGv_ptr ra, rb, rd; \
6736 if (unlikely(!ctx->altivec_enabled)) { \
6737 gen_exception(ctx, POWERPC_EXCP_VPU); \
6738 return; \
6740 ra = gen_avr_ptr(rA(ctx->opcode)); \
6741 rb = gen_avr_ptr(rB(ctx->opcode)); \
6742 rd = gen_avr_ptr(rD(ctx->opcode)); \
6743 gen_helper_##name(cpu_env, rd, ra, rb); \
6744 tcg_temp_free_ptr(ra); \
6745 tcg_temp_free_ptr(rb); \
6746 tcg_temp_free_ptr(rd); \
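/* Difference between the two macros above (sketch): GEN_VXFORM emits
 * gen_helper_<name>(rd, ra, rb), while GEN_VXFORM_ENV passes cpu_env as the
 * first argument for the variants that need to touch CPUPPCState, e.g. the
 * saturating and floating-point forms:
 *
 *   gen_helper_vaddubm(rd, ra, rb);             (modulo add)
 *   gen_helper_vaddubs(cpu_env, rd, ra, rb);    (saturating add, needs env)
 */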
6749 GEN_VXFORM(vaddubm, 0, 0);
6750 GEN_VXFORM(vadduhm, 0, 1);
6751 GEN_VXFORM(vadduwm, 0, 2);
6752 GEN_VXFORM(vsububm, 0, 16);
6753 GEN_VXFORM(vsubuhm, 0, 17);
6754 GEN_VXFORM(vsubuwm, 0, 18);
6755 GEN_VXFORM(vmaxub, 1, 0);
6756 GEN_VXFORM(vmaxuh, 1, 1);
6757 GEN_VXFORM(vmaxuw, 1, 2);
6758 GEN_VXFORM(vmaxsb, 1, 4);
6759 GEN_VXFORM(vmaxsh, 1, 5);
6760 GEN_VXFORM(vmaxsw, 1, 6);
6761 GEN_VXFORM(vminub, 1, 8);
6762 GEN_VXFORM(vminuh, 1, 9);
6763 GEN_VXFORM(vminuw, 1, 10);
6764 GEN_VXFORM(vminsb, 1, 12);
6765 GEN_VXFORM(vminsh, 1, 13);
6766 GEN_VXFORM(vminsw, 1, 14);
6767 GEN_VXFORM(vavgub, 1, 16);
6768 GEN_VXFORM(vavguh, 1, 17);
6769 GEN_VXFORM(vavguw, 1, 18);
6770 GEN_VXFORM(vavgsb, 1, 20);
6771 GEN_VXFORM(vavgsh, 1, 21);
6772 GEN_VXFORM(vavgsw, 1, 22);
6773 GEN_VXFORM(vmrghb, 6, 0);
6774 GEN_VXFORM(vmrghh, 6, 1);
6775 GEN_VXFORM(vmrghw, 6, 2);
6776 GEN_VXFORM(vmrglb, 6, 4);
6777 GEN_VXFORM(vmrglh, 6, 5);
6778 GEN_VXFORM(vmrglw, 6, 6);
6779 GEN_VXFORM(vmuloub, 4, 0);
6780 GEN_VXFORM(vmulouh, 4, 1);
6781 GEN_VXFORM(vmulosb, 4, 4);
6782 GEN_VXFORM(vmulosh, 4, 5);
6783 GEN_VXFORM(vmuleub, 4, 8);
6784 GEN_VXFORM(vmuleuh, 4, 9);
6785 GEN_VXFORM(vmulesb, 4, 12);
6786 GEN_VXFORM(vmulesh, 4, 13);
6787 GEN_VXFORM(vslb, 2, 4);
6788 GEN_VXFORM(vslh, 2, 5);
6789 GEN_VXFORM(vslw, 2, 6);
6790 GEN_VXFORM(vsrb, 2, 8);
6791 GEN_VXFORM(vsrh, 2, 9);
6792 GEN_VXFORM(vsrw, 2, 10);
6793 GEN_VXFORM(vsrab, 2, 12);
6794 GEN_VXFORM(vsrah, 2, 13);
6795 GEN_VXFORM(vsraw, 2, 14);
6796 GEN_VXFORM(vslo, 6, 16);
6797 GEN_VXFORM(vsro, 6, 17);
6798 GEN_VXFORM(vaddcuw, 0, 6);
6799 GEN_VXFORM(vsubcuw, 0, 22);
6800 GEN_VXFORM_ENV(vaddubs, 0, 8);
6801 GEN_VXFORM_ENV(vadduhs, 0, 9);
6802 GEN_VXFORM_ENV(vadduws, 0, 10);
6803 GEN_VXFORM_ENV(vaddsbs, 0, 12);
6804 GEN_VXFORM_ENV(vaddshs, 0, 13);
6805 GEN_VXFORM_ENV(vaddsws, 0, 14);
6806 GEN_VXFORM_ENV(vsububs, 0, 24);
6807 GEN_VXFORM_ENV(vsubuhs, 0, 25);
6808 GEN_VXFORM_ENV(vsubuws, 0, 26);
6809 GEN_VXFORM_ENV(vsubsbs, 0, 28);
6810 GEN_VXFORM_ENV(vsubshs, 0, 29);
6811 GEN_VXFORM_ENV(vsubsws, 0, 30);
6812 GEN_VXFORM(vrlb, 2, 0);
6813 GEN_VXFORM(vrlh, 2, 1);
6814 GEN_VXFORM(vrlw, 2, 2);
6815 GEN_VXFORM(vsl, 2, 7);
6816 GEN_VXFORM(vsr, 2, 11);
6817 GEN_VXFORM_ENV(vpkuhum, 7, 0);
6818 GEN_VXFORM_ENV(vpkuwum, 7, 1);
6819 GEN_VXFORM_ENV(vpkuhus, 7, 2);
6820 GEN_VXFORM_ENV(vpkuwus, 7, 3);
6821 GEN_VXFORM_ENV(vpkshus, 7, 4);
6822 GEN_VXFORM_ENV(vpkswus, 7, 5);
6823 GEN_VXFORM_ENV(vpkshss, 7, 6);
6824 GEN_VXFORM_ENV(vpkswss, 7, 7);
6825 GEN_VXFORM(vpkpx, 7, 12);
6826 GEN_VXFORM_ENV(vsum4ubs, 4, 24);
6827 GEN_VXFORM_ENV(vsum4sbs, 4, 28);
6828 GEN_VXFORM_ENV(vsum4shs, 4, 25);
6829 GEN_VXFORM_ENV(vsum2sws, 4, 26);
6830 GEN_VXFORM_ENV(vsumsws, 4, 30);
6831 GEN_VXFORM_ENV(vaddfp, 5, 0);
6832 GEN_VXFORM_ENV(vsubfp, 5, 1);
6833 GEN_VXFORM_ENV(vmaxfp, 5, 16);
6834 GEN_VXFORM_ENV(vminfp, 5, 17);
6836 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
6837 static void glue(gen_, name)(DisasContext *ctx) \
6839 TCGv_ptr ra, rb, rd; \
6840 if (unlikely(!ctx->altivec_enabled)) { \
6841 gen_exception(ctx, POWERPC_EXCP_VPU); \
6842 return; \
6844 ra = gen_avr_ptr(rA(ctx->opcode)); \
6845 rb = gen_avr_ptr(rB(ctx->opcode)); \
6846 rd = gen_avr_ptr(rD(ctx->opcode)); \
6847 gen_helper_##opname(cpu_env, rd, ra, rb); \
6848 tcg_temp_free_ptr(ra); \
6849 tcg_temp_free_ptr(rb); \
6850 tcg_temp_free_ptr(rd); \
6853 #define GEN_VXRFORM(name, opc2, opc3) \
6854 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
6855 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
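/* GEN_VXRFORM above instantiates each VXR-form compare twice: the plain
 * form and the record form, the latter OR-ing 0x10 into opc3 and calling
 * the gen_helper_<name>_ variant.  For example GEN_VXRFORM(vcmpequb, 3, 0)
 * produces both gen_vcmpequb() and gen_vcmpequb_dot(), the dot form being
 * the one that also reflects the comparison result in CR6.
 */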
6857 GEN_VXRFORM(vcmpequb, 3, 0)
6858 GEN_VXRFORM(vcmpequh, 3, 1)
6859 GEN_VXRFORM(vcmpequw, 3, 2)
6860 GEN_VXRFORM(vcmpgtsb, 3, 12)
6861 GEN_VXRFORM(vcmpgtsh, 3, 13)
6862 GEN_VXRFORM(vcmpgtsw, 3, 14)
6863 GEN_VXRFORM(vcmpgtub, 3, 8)
6864 GEN_VXRFORM(vcmpgtuh, 3, 9)
6865 GEN_VXRFORM(vcmpgtuw, 3, 10)
6866 GEN_VXRFORM(vcmpeqfp, 3, 3)
6867 GEN_VXRFORM(vcmpgefp, 3, 7)
6868 GEN_VXRFORM(vcmpgtfp, 3, 11)
6869 GEN_VXRFORM(vcmpbfp, 3, 15)
6871 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6872 static void glue(gen_, name)(DisasContext *ctx) \
6874 TCGv_ptr rd; \
6875 TCGv_i32 simm; \
6876 if (unlikely(!ctx->altivec_enabled)) { \
6877 gen_exception(ctx, POWERPC_EXCP_VPU); \
6878 return; \
6880 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6881 rd = gen_avr_ptr(rD(ctx->opcode)); \
6882 gen_helper_##name (rd, simm); \
6883 tcg_temp_free_i32(simm); \
6884 tcg_temp_free_ptr(rd); \
6887 GEN_VXFORM_SIMM(vspltisb, 6, 12);
6888 GEN_VXFORM_SIMM(vspltish, 6, 13);
6889 GEN_VXFORM_SIMM(vspltisw, 6, 14);
6891 #define GEN_VXFORM_NOA(name, opc2, opc3) \
6892 static void glue(gen_, name)(DisasContext *ctx) \
6894 TCGv_ptr rb, rd; \
6895 if (unlikely(!ctx->altivec_enabled)) { \
6896 gen_exception(ctx, POWERPC_EXCP_VPU); \
6897 return; \
6899 rb = gen_avr_ptr(rB(ctx->opcode)); \
6900 rd = gen_avr_ptr(rD(ctx->opcode)); \
6901 gen_helper_##name (rd, rb); \
6902 tcg_temp_free_ptr(rb); \
6903 tcg_temp_free_ptr(rd); \
6906 #define GEN_VXFORM_NOA_ENV(name, opc2, opc3) \
6907 static void glue(gen_, name)(DisasContext *ctx) \
6909 TCGv_ptr rb, rd; \
6911 if (unlikely(!ctx->altivec_enabled)) { \
6912 gen_exception(ctx, POWERPC_EXCP_VPU); \
6913 return; \
6915 rb = gen_avr_ptr(rB(ctx->opcode)); \
6916 rd = gen_avr_ptr(rD(ctx->opcode)); \
6917 gen_helper_##name(cpu_env, rd, rb); \
6918 tcg_temp_free_ptr(rb); \
6919 tcg_temp_free_ptr(rd); \
6922 GEN_VXFORM_NOA(vupkhsb, 7, 8);
6923 GEN_VXFORM_NOA(vupkhsh, 7, 9);
6924 GEN_VXFORM_NOA(vupklsb, 7, 10);
6925 GEN_VXFORM_NOA(vupklsh, 7, 11);
6926 GEN_VXFORM_NOA(vupkhpx, 7, 13);
6927 GEN_VXFORM_NOA(vupklpx, 7, 15);
6928 GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
6929 GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
6930 GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
6931 GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
6932 GEN_VXFORM_NOA_ENV(vrfim, 5, 8);
6933 GEN_VXFORM_NOA_ENV(vrfin, 5, 9);
6934 GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
6935 GEN_VXFORM_NOA_ENV(vrfiz, 5, 11);
6937 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
6938 static void glue(gen_, name)(DisasContext *ctx) \
6940 TCGv_ptr rd; \
6941 TCGv_i32 simm; \
6942 if (unlikely(!ctx->altivec_enabled)) { \
6943 gen_exception(ctx, POWERPC_EXCP_VPU); \
6944 return; \
6946 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
6947 rd = gen_avr_ptr(rD(ctx->opcode)); \
6948 gen_helper_##name (rd, simm); \
6949 tcg_temp_free_i32(simm); \
6950 tcg_temp_free_ptr(rd); \
6953 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
6954 static void glue(gen_, name)(DisasContext *ctx) \
6956 TCGv_ptr rb, rd; \
6957 TCGv_i32 uimm; \
6958 if (unlikely(!ctx->altivec_enabled)) { \
6959 gen_exception(ctx, POWERPC_EXCP_VPU); \
6960 return; \
6962 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6963 rb = gen_avr_ptr(rB(ctx->opcode)); \
6964 rd = gen_avr_ptr(rD(ctx->opcode)); \
6965 gen_helper_##name (rd, rb, uimm); \
6966 tcg_temp_free_i32(uimm); \
6967 tcg_temp_free_ptr(rb); \
6968 tcg_temp_free_ptr(rd); \
6971 #define GEN_VXFORM_UIMM_ENV(name, opc2, opc3) \
6972 static void glue(gen_, name)(DisasContext *ctx) \
6974 TCGv_ptr rb, rd; \
6975 TCGv_i32 uimm; \
6977 if (unlikely(!ctx->altivec_enabled)) { \
6978 gen_exception(ctx, POWERPC_EXCP_VPU); \
6979 return; \
6981 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
6982 rb = gen_avr_ptr(rB(ctx->opcode)); \
6983 rd = gen_avr_ptr(rD(ctx->opcode)); \
6984 gen_helper_##name(cpu_env, rd, rb, uimm); \
6985 tcg_temp_free_i32(uimm); \
6986 tcg_temp_free_ptr(rb); \
6987 tcg_temp_free_ptr(rd); \
6990 GEN_VXFORM_UIMM(vspltb, 6, 8);
6991 GEN_VXFORM_UIMM(vsplth, 6, 9);
6992 GEN_VXFORM_UIMM(vspltw, 6, 10);
6993 GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
6994 GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
6995 GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
6996 GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
6998 static void gen_vsldoi(DisasContext *ctx)
7000 TCGv_ptr ra, rb, rd;
7001 TCGv_i32 sh;
7002 if (unlikely(!ctx->altivec_enabled)) {
7003 gen_exception(ctx, POWERPC_EXCP_VPU);
7004 return;
7006 ra = gen_avr_ptr(rA(ctx->opcode));
7007 rb = gen_avr_ptr(rB(ctx->opcode));
7008 rd = gen_avr_ptr(rD(ctx->opcode));
7009 sh = tcg_const_i32(VSH(ctx->opcode));
7010 gen_helper_vsldoi (rd, ra, rb, sh);
7011 tcg_temp_free_ptr(ra);
7012 tcg_temp_free_ptr(rb);
7013 tcg_temp_free_ptr(rd);
7014 tcg_temp_free_i32(sh);
7017 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
7018 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
7020 TCGv_ptr ra, rb, rc, rd; \
7021 if (unlikely(!ctx->altivec_enabled)) { \
7022 gen_exception(ctx, POWERPC_EXCP_VPU); \
7023 return; \
7025 ra = gen_avr_ptr(rA(ctx->opcode)); \
7026 rb = gen_avr_ptr(rB(ctx->opcode)); \
7027 rc = gen_avr_ptr(rC(ctx->opcode)); \
7028 rd = gen_avr_ptr(rD(ctx->opcode)); \
7029 if (Rc(ctx->opcode)) { \
7030 gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
7031 } else { \
7032 gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
7034 tcg_temp_free_ptr(ra); \
7035 tcg_temp_free_ptr(rb); \
7036 tcg_temp_free_ptr(rc); \
7037 tcg_temp_free_ptr(rd); \
7040 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
7042 static void gen_vmladduhm(DisasContext *ctx)
7044 TCGv_ptr ra, rb, rc, rd;
7045 if (unlikely(!ctx->altivec_enabled)) {
7046 gen_exception(ctx, POWERPC_EXCP_VPU);
7047 return;
7049 ra = gen_avr_ptr(rA(ctx->opcode));
7050 rb = gen_avr_ptr(rB(ctx->opcode));
7051 rc = gen_avr_ptr(rC(ctx->opcode));
7052 rd = gen_avr_ptr(rD(ctx->opcode));
7053 gen_helper_vmladduhm(rd, ra, rb, rc);
7054 tcg_temp_free_ptr(ra);
7055 tcg_temp_free_ptr(rb);
7056 tcg_temp_free_ptr(rc);
7057 tcg_temp_free_ptr(rd);
7060 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
7061 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
7062 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
7063 GEN_VAFORM_PAIRED(vsel, vperm, 21)
7064 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
7066 /*** VSX extension ***/
7068 static inline TCGv_i64 cpu_vsrh(int n)
7070 if (n < 32) {
7071 return cpu_fpr[n];
7072 } else {
7073 return cpu_avrh[n-32];
7077 static inline TCGv_i64 cpu_vsrl(int n)
7079 if (n < 32) {
7080 return cpu_vsr[n];
7081 } else {
7082 return cpu_avrl[n-32];
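/* VSX register layout implied by the two accessors above: VSRs 0-31 take
 * their high doubleword from the FPRs (cpu_fpr[n]) and keep the low
 * doubleword in cpu_vsr[n], while VSRs 32-63 alias the Altivec registers
 * (cpu_avrh/cpu_avrl of AVR n-32).  Handlers therefore always go through
 * cpu_vsrh()/cpu_vsrl() instead of indexing a single 64-entry array.
 */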
7086 #define VSX_LOAD_SCALAR(name, operation) \
7087 static void gen_##name(DisasContext *ctx) \
7089 TCGv EA; \
7090 if (unlikely(!ctx->vsx_enabled)) { \
7091 gen_exception(ctx, POWERPC_EXCP_VSXU); \
7092 return; \
7094 gen_set_access_type(ctx, ACCESS_INT); \
7095 EA = tcg_temp_new(); \
7096 gen_addr_reg_index(ctx, EA); \
7097 gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
7098 /* NOTE: cpu_vsrl is undefined */ \
7099 tcg_temp_free(EA); \
7102 VSX_LOAD_SCALAR(lxsdx, ld64)
7103 VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
7104 VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
7105 VSX_LOAD_SCALAR(lxsspx, ld32fs)
7107 static void gen_lxvd2x(DisasContext *ctx)
7109 TCGv EA;
7110 if (unlikely(!ctx->vsx_enabled)) {
7111 gen_exception(ctx, POWERPC_EXCP_VSXU);
7112 return;
7114 gen_set_access_type(ctx, ACCESS_INT);
7115 EA = tcg_temp_new();
7116 gen_addr_reg_index(ctx, EA);
7117 gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
7118 tcg_gen_addi_tl(EA, EA, 8);
7119 gen_qemu_ld64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
7120 tcg_temp_free(EA);
7123 static void gen_lxvdsx(DisasContext *ctx)
7125 TCGv EA;
7126 if (unlikely(!ctx->vsx_enabled)) {
7127 gen_exception(ctx, POWERPC_EXCP_VSXU);
7128 return;
7130 gen_set_access_type(ctx, ACCESS_INT);
7131 EA = tcg_temp_new();
7132 gen_addr_reg_index(ctx, EA);
7133 gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
7134 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
7135 tcg_temp_free(EA);
7138 static void gen_lxvw4x(DisasContext *ctx)
7140 TCGv EA;
7141 TCGv_i64 tmp;
7142 TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
7143 TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
7144 if (unlikely(!ctx->vsx_enabled)) {
7145 gen_exception(ctx, POWERPC_EXCP_VSXU);
7146 return;
7148 gen_set_access_type(ctx, ACCESS_INT);
7149 EA = tcg_temp_new();
7150 tmp = tcg_temp_new_i64();
7152 gen_addr_reg_index(ctx, EA);
7153 gen_qemu_ld32u_i64(ctx, tmp, EA);
7154 tcg_gen_addi_tl(EA, EA, 4);
7155 gen_qemu_ld32u_i64(ctx, xth, EA);
7156 tcg_gen_deposit_i64(xth, xth, tmp, 32, 32);
7158 tcg_gen_addi_tl(EA, EA, 4);
7159 gen_qemu_ld32u_i64(ctx, tmp, EA);
7160 tcg_gen_addi_tl(EA, EA, 4);
7161 gen_qemu_ld32u_i64(ctx, xtl, EA);
7162 tcg_gen_deposit_i64(xtl, xtl, tmp, 32, 32);
7164 tcg_temp_free(EA);
7165 tcg_temp_free_i64(tmp);
7168 #define VSX_STORE_SCALAR(name, operation) \
7169 static void gen_##name(DisasContext *ctx) \
7171 TCGv EA; \
7172 if (unlikely(!ctx->vsx_enabled)) { \
7173 gen_exception(ctx, POWERPC_EXCP_VSXU); \
7174 return; \
7176 gen_set_access_type(ctx, ACCESS_INT); \
7177 EA = tcg_temp_new(); \
7178 gen_addr_reg_index(ctx, EA); \
7179 gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
7180 tcg_temp_free(EA); \
7183 VSX_STORE_SCALAR(stxsdx, st64)
7184 VSX_STORE_SCALAR(stxsiwx, st32_i64)
7185 VSX_STORE_SCALAR(stxsspx, st32fs)
7187 static void gen_stxvd2x(DisasContext *ctx)
7189 TCGv EA;
7190 if (unlikely(!ctx->vsx_enabled)) {
7191 gen_exception(ctx, POWERPC_EXCP_VSXU);
7192 return;
7194 gen_set_access_type(ctx, ACCESS_INT);
7195 EA = tcg_temp_new();
7196 gen_addr_reg_index(ctx, EA);
7197 gen_qemu_st64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
7198 tcg_gen_addi_tl(EA, EA, 8);
7199 gen_qemu_st64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
7200 tcg_temp_free(EA);
7203 static void gen_stxvw4x(DisasContext *ctx)
7205 TCGv_i64 tmp;
7206 TCGv EA;
7207 if (unlikely(!ctx->vsx_enabled)) {
7208 gen_exception(ctx, POWERPC_EXCP_VSXU);
7209 return;
7211 gen_set_access_type(ctx, ACCESS_INT);
7212 EA = tcg_temp_new();
7213 gen_addr_reg_index(ctx, EA);
7214 tmp = tcg_temp_new_i64();
7216 tcg_gen_shri_i64(tmp, cpu_vsrh(xS(ctx->opcode)), 32);
7217 gen_qemu_st32_i64(ctx, tmp, EA);
7218 tcg_gen_addi_tl(EA, EA, 4);
7219 gen_qemu_st32_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
7221 tcg_gen_shri_i64(tmp, cpu_vsrl(xS(ctx->opcode)), 32);
7222 tcg_gen_addi_tl(EA, EA, 4);
7223 gen_qemu_st32_i64(ctx, tmp, EA);
7224 tcg_gen_addi_tl(EA, EA, 4);
7225 gen_qemu_st32_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
7227 tcg_temp_free(EA);
7228 tcg_temp_free_i64(tmp);
7231 #define MV_VSRW(name, tcgop1, tcgop2, target, source) \
7232 static void gen_##name(DisasContext *ctx) \
7234 if (xS(ctx->opcode) < 32) { \
7235 if (unlikely(!ctx->fpu_enabled)) { \
7236 gen_exception(ctx, POWERPC_EXCP_FPU); \
7237 return; \
7239 } else { \
7240 if (unlikely(!ctx->altivec_enabled)) { \
7241 gen_exception(ctx, POWERPC_EXCP_VPU); \
7242 return; \
7245 TCGv_i64 tmp = tcg_temp_new_i64(); \
7246 tcg_gen_##tcgop1(tmp, source); \
7247 tcg_gen_##tcgop2(target, tmp); \
7248 tcg_temp_free_i64(tmp); \
7252 MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
7253 cpu_vsrh(xS(ctx->opcode)))
7254 MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
7255 cpu_gpr[rA(ctx->opcode)])
7256 MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
7257 cpu_gpr[rA(ctx->opcode)])
7259 #if defined(TARGET_PPC64)
7260 #define MV_VSRD(name, target, source) \
7261 static void gen_##name(DisasContext *ctx) \
7263 if (xS(ctx->opcode) < 32) { \
7264 if (unlikely(!ctx->fpu_enabled)) { \
7265 gen_exception(ctx, POWERPC_EXCP_FPU); \
7266 return; \
7268 } else { \
7269 if (unlikely(!ctx->altivec_enabled)) { \
7270 gen_exception(ctx, POWERPC_EXCP_VPU); \
7271 return; \
7274 tcg_gen_mov_i64(target, source); \
7277 MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
7278 MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])
7280 #endif
7282 static void gen_xxpermdi(DisasContext *ctx)
7284 if (unlikely(!ctx->vsx_enabled)) {
7285 gen_exception(ctx, POWERPC_EXCP_VSXU);
7286 return;
7289 if ((DM(ctx->opcode) & 2) == 0) {
7290 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
7291 } else {
7292 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
7294 if ((DM(ctx->opcode) & 1) == 0) {
7295 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
7296 } else {
7297 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
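/* xxpermdi above selects doublewords with the 2-bit DM field: bit 1 picks
 * the high or low doubleword of xA for the target's high half, and bit 0
 * picks the high or low doubleword of xB for the target's low half, so
 * DM=0 copies xA.hi:xB.hi and DM=3 copies xA.lo:xB.lo.
 */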
7301 #define OP_ABS 1
7302 #define OP_NABS 2
7303 #define OP_NEG 3
7304 #define OP_CPSGN 4
7305 #define SGN_MASK_DP 0x8000000000000000ul
7306 #define SGN_MASK_SP 0x8000000080000000ul
7308 #define VSX_SCALAR_MOVE(name, op, sgn_mask) \
7309 static void glue(gen_, name)(DisasContext * ctx) \
7311 TCGv_i64 xb, sgm; \
7312 if (unlikely(!ctx->vsx_enabled)) { \
7313 gen_exception(ctx, POWERPC_EXCP_VSXU); \
7314 return; \
7316 xb = tcg_temp_new_i64(); \
7317 sgm = tcg_temp_new_i64(); \
7318 tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode))); \
7319 tcg_gen_movi_i64(sgm, sgn_mask); \
7320 switch (op) { \
7321 case OP_ABS: { \
7322 tcg_gen_andc_i64(xb, xb, sgm); \
7323 break; \
7325 case OP_NABS: { \
7326 tcg_gen_or_i64(xb, xb, sgm); \
7327 break; \
7329 case OP_NEG: { \
7330 tcg_gen_xor_i64(xb, xb, sgm); \
7331 break; \
7333 case OP_CPSGN: { \
7334 TCGv_i64 xa = tcg_temp_new_i64(); \
7335 tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode))); \
7336 tcg_gen_and_i64(xa, xa, sgm); \
7337 tcg_gen_andc_i64(xb, xb, sgm); \
7338 tcg_gen_or_i64(xb, xb, xa); \
7339 tcg_temp_free_i64(xa); \
7340 break; \
7343 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb); \
7344 tcg_temp_free_i64(xb); \
7345 tcg_temp_free_i64(sgm); \
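/* Sign-bit arithmetic used by VSX_SCALAR_MOVE and VSX_VECTOR_MOVE (sketch):
 * with sgm holding only the sign bit(s),
 *
 *   abs:   x & ~sgm   (clear the sign, tcg_gen_andc_i64)
 *   nabs:  x |  sgm   (force the sign, tcg_gen_or_i64)
 *   neg:   x ^  sgm   (flip the sign, tcg_gen_xor_i64)
 *   cpsgn: (a & sgm) | (b & ~sgm)   (copy a's sign onto b)
 *
 * SGN_MASK_SP holds the sign bit of both 32-bit words packed into one
 * 64-bit half, so the single-precision vector forms reuse the same code.
 */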
7348 VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
7349 VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
7350 VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
7351 VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
7353 #define VSX_VECTOR_MOVE(name, op, sgn_mask) \
7354 static void glue(gen_, name)(DisasContext * ctx) \
7356 TCGv_i64 xbh, xbl, sgm; \
7357 if (unlikely(!ctx->vsx_enabled)) { \
7358 gen_exception(ctx, POWERPC_EXCP_VSXU); \
7359 return; \
7361 xbh = tcg_temp_new_i64(); \
7362 xbl = tcg_temp_new_i64(); \
7363 sgm = tcg_temp_new_i64(); \
7364 tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode))); \
7365 tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode))); \
7366 tcg_gen_movi_i64(sgm, sgn_mask); \
7367 switch (op) { \
7368 case OP_ABS: { \
7369 tcg_gen_andc_i64(xbh, xbh, sgm); \
7370 tcg_gen_andc_i64(xbl, xbl, sgm); \
7371 break; \
7373 case OP_NABS: { \
7374 tcg_gen_or_i64(xbh, xbh, sgm); \
7375 tcg_gen_or_i64(xbl, xbl, sgm); \
7376 break; \
7378 case OP_NEG: { \
7379 tcg_gen_xor_i64(xbh, xbh, sgm); \
7380 tcg_gen_xor_i64(xbl, xbl, sgm); \
7381 break; \
7383 case OP_CPSGN: { \
7384 TCGv_i64 xah = tcg_temp_new_i64(); \
7385 TCGv_i64 xal = tcg_temp_new_i64(); \
7386 tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
7387 tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
7388 tcg_gen_and_i64(xah, xah, sgm); \
7389 tcg_gen_and_i64(xal, xal, sgm); \
7390 tcg_gen_andc_i64(xbh, xbh, sgm); \
7391 tcg_gen_andc_i64(xbl, xbl, sgm); \
7392 tcg_gen_or_i64(xbh, xbh, xah); \
7393 tcg_gen_or_i64(xbl, xbl, xal); \
7394 tcg_temp_free_i64(xah); \
7395 tcg_temp_free_i64(xal); \
7396 break; \
7399 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh); \
7400 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl); \
7401 tcg_temp_free_i64(xbh); \
7402 tcg_temp_free_i64(xbl); \
7403 tcg_temp_free_i64(sgm); \
7406 VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
7407 VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
7408 VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
7409 VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
7410 VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
7411 VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
7412 VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
7413 VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
7415 #define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \
7416 static void gen_##name(DisasContext * ctx) \
7418 TCGv_i32 opc; \
7419 if (unlikely(!ctx->vsx_enabled)) { \
7420 gen_exception(ctx, POWERPC_EXCP_VSXU); \
7421 return; \
7423 /* NIP cannot be restored if the memory exception comes from a helper */ \
7424 gen_update_nip(ctx, ctx->nip - 4); \
7425 opc = tcg_const_i32(ctx->opcode); \
7426 gen_helper_##name(cpu_env, opc); \
7427 tcg_temp_free_i32(opc); \
7430 #define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
7431 static void gen_##name(DisasContext * ctx) \
7433 if (unlikely(!ctx->vsx_enabled)) { \
7434 gen_exception(ctx, POWERPC_EXCP_VSXU); \
7435 return; \
7437 /* NIP cannot be restored if the exception comes */ \
7438 /* from a helper. */ \
7439 gen_update_nip(ctx, ctx->nip - 4); \
7441 gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env, \
7442 cpu_vsrh(xB(ctx->opcode))); \
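/* Most of the VSX arithmetic below goes through GEN_VSX_HELPER_2, which
 * defers the whole operation to a C helper and hands it the raw opcode word
 * so the helper can extract xT/xA/xB itself, e.g.:
 *
 *   opc = tcg_const_i32(ctx->opcode);
 *   gen_helper_xsadddp(cpu_env, opc);
 *
 * GEN_VSX_HELPER_XT_XB_ENV is the lighter form used for single-source
 * conversions, passing cpu_env plus the xT and xB high doublewords directly.
 */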
7445 GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
7446 GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
7447 GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
7448 GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
7449 GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
7450 GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
7451 GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
7452 GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
7453 GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
7454 GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
7455 GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
7456 GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
7457 GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
7458 GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
7459 GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
7460 GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
7461 GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
7462 GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
7463 GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
7464 GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
7465 GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
7466 GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
7467 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
7468 GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
7469 GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
7470 GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
7471 GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
7472 GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
7473 GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
7474 GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
7475 GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
7476 GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
7477 GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
7478 GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
7479 GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
7480 GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
7481 GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
7483 GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
7484 GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
7485 GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
7486 GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
7487 GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
7488 GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
7489 GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
7490 GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
7491 GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
7492 GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
7493 GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
7494 GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
7495 GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
7496 GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
7497 GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
7498 GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
7499 GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
7501 GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
7502 GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
7503 GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
7504 GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
7505 GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
7506 GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
7507 GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
7508 GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
7509 GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
7510 GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
7511 GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
7512 GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
7513 GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
7514 GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
7515 GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
7516 GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
7517 GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
7518 GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
7519 GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
7520 GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
7521 GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
7522 GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
7523 GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
7524 GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
7525 GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
7526 GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
7527 GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
7528 GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
7529 GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
7530 GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
7531 GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
7532 GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
7533 GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
7534 GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
7535 GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
7536 GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)
7538 GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
7539 GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
7540 GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
7541 GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
7542 GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
7543 GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
7544 GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
7545 GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
7546 GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
7547 GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
7548 GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
7549 GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
7550 GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
7551 GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
7552 GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
7553 GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
7554 GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
7555 GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
7556 GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
7557 GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
7558 GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
7559 GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
7560 GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
7561 GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
7562 GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
7563 GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
7564 GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
7565 GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
7566 GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
7567 GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
7568 GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
7569 GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
7570 GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
7571 GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
7572 GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
7573 GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
7575 #define VSX_LOGICAL(name, tcg_op) \
7576 static void glue(gen_, name)(DisasContext * ctx) \
7578 if (unlikely(!ctx->vsx_enabled)) { \
7579 gen_exception(ctx, POWERPC_EXCP_VSXU); \
7580 return; \
7582 tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
7583 cpu_vsrh(xB(ctx->opcode))); \
7584 tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
7585 cpu_vsrl(xB(ctx->opcode))); \
7588 VSX_LOGICAL(xxland, tcg_gen_and_i64)
7589 VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
7590 VSX_LOGICAL(xxlor, tcg_gen_or_i64)
7591 VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
7592 VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
7593 VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
7594 VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
7595 VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)
7597 #define VSX_XXMRG(name, high) \
7598 static void glue(gen_, name)(DisasContext * ctx) \
7600 TCGv_i64 a0, a1, b0, b1; \
7601 if (unlikely(!ctx->vsx_enabled)) { \
7602 gen_exception(ctx, POWERPC_EXCP_VSXU); \
7603 return; \
7605 a0 = tcg_temp_new_i64(); \
7606 a1 = tcg_temp_new_i64(); \
7607 b0 = tcg_temp_new_i64(); \
7608 b1 = tcg_temp_new_i64(); \
7609 if (high) { \
7610 tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
7611 tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
7612 tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
7613 tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
7614 } else { \
7615 tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
7616 tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
7617 tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
7618 tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
7620 tcg_gen_shri_i64(a0, a0, 32); \
7621 tcg_gen_shri_i64(b0, b0, 32); \
7622 tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)), \
7623 b0, a0, 32, 32); \
7624 tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), \
7625 b1, a1, 32, 32); \
7626 tcg_temp_free_i64(a0); \
7627 tcg_temp_free_i64(a1); \
7628 tcg_temp_free_i64(b0); \
7629 tcg_temp_free_i64(b1); \
7632 VSX_XXMRG(xxmrghw, 1)
7633 VSX_XXMRG(xxmrglw, 0)
7635 static void gen_xxsel(DisasContext * ctx)
7637 TCGv_i64 a, b, c;
7638 if (unlikely(!ctx->vsx_enabled)) {
7639 gen_exception(ctx, POWERPC_EXCP_VSXU);
7640 return;
7642 a = tcg_temp_new_i64();
7643 b = tcg_temp_new_i64();
7644 c = tcg_temp_new_i64();
7646 tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
7647 tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
7648 tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));
7650 tcg_gen_and_i64(b, b, c);
7651 tcg_gen_andc_i64(a, a, c);
7652 tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);
7654 tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
7655 tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
7656 tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));
7658 tcg_gen_and_i64(b, b, c);
7659 tcg_gen_andc_i64(a, a, c);
7660 tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);
7662 tcg_temp_free_i64(a);
7663 tcg_temp_free_i64(b);
7664 tcg_temp_free_i64(c);
7667 static void gen_xxspltw(DisasContext *ctx)
7669 TCGv_i64 b, b2;
7670 TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
7671 cpu_vsrl(xB(ctx->opcode)) :
7672 cpu_vsrh(xB(ctx->opcode));
7674 if (unlikely(!ctx->vsx_enabled)) {
7675 gen_exception(ctx, POWERPC_EXCP_VSXU);
7676 return;
7679 b = tcg_temp_new_i64();
7680 b2 = tcg_temp_new_i64();
7682 if (UIM(ctx->opcode) & 1) {
7683 tcg_gen_ext32u_i64(b, vsr);
7684 } else {
7685 tcg_gen_shri_i64(b, vsr, 32);
7688 tcg_gen_shli_i64(b2, b, 32);
7689 tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
7690 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
7692 tcg_temp_free_i64(b);
7693 tcg_temp_free_i64(b2);
7696 static void gen_xxsldwi(DisasContext *ctx)
7698 TCGv_i64 xth, xtl;
7699 if (unlikely(!ctx->vsx_enabled)) {
7700 gen_exception(ctx, POWERPC_EXCP_VSXU);
7701 return;
7703 xth = tcg_temp_new_i64();
7704 xtl = tcg_temp_new_i64();
7706 switch (SHW(ctx->opcode)) {
7707 case 0: {
7708 tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
7709 tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
7710 break;
7712 case 1: {
7713 TCGv_i64 t0 = tcg_temp_new_i64();
7714 tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
7715 tcg_gen_shli_i64(xth, xth, 32);
7716 tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
7717 tcg_gen_shri_i64(t0, t0, 32);
7718 tcg_gen_or_i64(xth, xth, t0);
7719 tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
7720 tcg_gen_shli_i64(xtl, xtl, 32);
7721 tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
7722 tcg_gen_shri_i64(t0, t0, 32);
7723 tcg_gen_or_i64(xtl, xtl, t0);
7724 tcg_temp_free_i64(t0);
7725 break;
7727 case 2: {
7728 tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
7729 tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
7730 break;
7732 case 3: {
7733 TCGv_i64 t0 = tcg_temp_new_i64();
7734 tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
7735 tcg_gen_shli_i64(xth, xth, 32);
7736 tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
7737 tcg_gen_shri_i64(t0, t0, 32);
7738 tcg_gen_or_i64(xth, xth, t0);
7739 tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
7740 tcg_gen_shli_i64(xtl, xtl, 32);
7741 tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
7742 tcg_gen_shri_i64(t0, t0, 32);
7743 tcg_gen_or_i64(xtl, xtl, t0);
7744 tcg_temp_free_i64(t0);
7745 break;
7749 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
7750 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);
7752 tcg_temp_free_i64(xth);
7753 tcg_temp_free_i64(xtl);
7757 /*** SPE extension ***/
7758 /* Register moves */
7760 static inline void gen_evmra(DisasContext *ctx)
7763 if (unlikely(!ctx->spe_enabled)) {
7764 gen_exception(ctx, POWERPC_EXCP_SPEU);
7765 return;
7768 #if defined(TARGET_PPC64)
7769 /* rD := rA */
7770 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7772 /* spe_acc := rA */
7773 tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
7774 cpu_env,
7775 offsetof(CPUPPCState, spe_acc));
7776 #else
7777 TCGv_i64 tmp = tcg_temp_new_i64();
7779 /* tmp := (rA_hi << 32) | rA_lo */
7780 tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7782 /* spe_acc := tmp */
7783 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
7784 tcg_temp_free_i64(tmp);
7786 /* rD := rA */
7787 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
7788 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
7789 #endif
7792 static inline void gen_load_gpr64(TCGv_i64 t, int reg)
7794 #if defined(TARGET_PPC64)
7795 tcg_gen_mov_i64(t, cpu_gpr[reg]);
7796 #else
7797 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
7798 #endif
7801 static inline void gen_store_gpr64(int reg, TCGv_i64 t)
7803 #if defined(TARGET_PPC64)
7804 tcg_gen_mov_i64(cpu_gpr[reg], t);
7805 #else
7806 TCGv_i64 tmp = tcg_temp_new_i64();
7807 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
7808 tcg_gen_shri_i64(tmp, t, 32);
7809 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
7810 tcg_temp_free_i64(tmp);
7811 #endif
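/* On 32-bit targets an SPE 64-bit GPR is modelled as the pair
 * (cpu_gpr[reg], cpu_gprh[reg]); gen_load_gpr64()/gen_store_gpr64() pack and
 * unpack that pair with concat/shift, while on TARGET_PPC64 they collapse to
 * a plain 64-bit move.  Sketch of the 32-bit case:
 *
 *   TCGv_i64 t = tcg_temp_new_i64();
 *   gen_load_gpr64(t, rA(ctx->opcode));    (t = rA_hi:rA_lo)
 *   ...
 *   gen_store_gpr64(rD(ctx->opcode), t);   (split back into rD, rD_hi)
 *   tcg_temp_free_i64(t);
 */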
7814 #define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
7815 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
7817 if (Rc(ctx->opcode)) \
7818 gen_##name1(ctx); \
7819 else \
7820 gen_##name0(ctx); \
7823 /* Handler for undefined SPE opcodes */
7824 static inline void gen_speundef(DisasContext *ctx)
7826 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7829 /* SPE logic */
7830 #if defined(TARGET_PPC64)
7831 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
7832 static inline void gen_##name(DisasContext *ctx) \
7834 if (unlikely(!ctx->spe_enabled)) { \
7835 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7836 return; \
7838 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7839 cpu_gpr[rB(ctx->opcode)]); \
7841 #else
7842 #define GEN_SPEOP_LOGIC2(name, tcg_op) \
7843 static inline void gen_##name(DisasContext *ctx) \
7845 if (unlikely(!ctx->spe_enabled)) { \
7846 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7847 return; \
7849 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7850 cpu_gpr[rB(ctx->opcode)]); \
7851 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
7852 cpu_gprh[rB(ctx->opcode)]); \
7854 #endif
7856 GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
7857 GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
7858 GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
7859 GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
7860 GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
7861 GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
7862 GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
7863 GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
7865 /* SPE logic immediate */
7866 #if defined(TARGET_PPC64)
7867 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
7868 static inline void gen_##name(DisasContext *ctx) \
7870 if (unlikely(!ctx->spe_enabled)) { \
7871 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7872 return; \
7874 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7875 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7876 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7877 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7878 tcg_opi(t0, t0, rB(ctx->opcode)); \
7879 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7880 tcg_gen_trunc_i64_i32(t1, t2); \
7881 tcg_temp_free_i64(t2); \
7882 tcg_opi(t1, t1, rB(ctx->opcode)); \
7883 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7884 tcg_temp_free_i32(t0); \
7885 tcg_temp_free_i32(t1); \
7887 #else
7888 #define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
7889 static inline void gen_##name(DisasContext *ctx) \
7891 if (unlikely(!ctx->spe_enabled)) { \
7892 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7893 return; \
7895 tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
7896 rB(ctx->opcode)); \
7897 tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
7898 rB(ctx->opcode)); \
7900 #endif
7901 GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
7902 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
7903 GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
7904 GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
7906 /* SPE arithmetic */
7907 #if defined(TARGET_PPC64)
7908 #define GEN_SPEOP_ARITH1(name, tcg_op) \
7909 static inline void gen_##name(DisasContext *ctx) \
7911 if (unlikely(!ctx->spe_enabled)) { \
7912 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7913 return; \
7915 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7916 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7917 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
7918 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7919 tcg_op(t0, t0); \
7920 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
7921 tcg_gen_trunc_i64_i32(t1, t2); \
7922 tcg_temp_free_i64(t2); \
7923 tcg_op(t1, t1); \
7924 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7925 tcg_temp_free_i32(t0); \
7926 tcg_temp_free_i32(t1); \
7928 #else
7929 #define GEN_SPEOP_ARITH1(name, tcg_op) \
7930 static inline void gen_##name(DisasContext *ctx) \
7932 if (unlikely(!ctx->spe_enabled)) { \
7933 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7934 return; \
7936 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
7937 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
7939 #endif
7941 static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
7943 int l1 = gen_new_label();
7944 int l2 = gen_new_label();
7946 tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
7947 tcg_gen_neg_i32(ret, arg1);
7948 tcg_gen_br(l2);
7949 gen_set_label(l1);
7950 tcg_gen_mov_i32(ret, arg1);
7951 gen_set_label(l2);
7953 GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
7954 GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
7955 GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
7956 GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
7957 static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
7959 tcg_gen_addi_i32(ret, arg1, 0x8000);
7960 tcg_gen_ext16u_i32(ret, ret);
7962 GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
7963 GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
7964 GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
7966 #if defined(TARGET_PPC64)
7967 #define GEN_SPEOP_ARITH2(name, tcg_op) \
7968 static inline void gen_##name(DisasContext *ctx) \
7970 if (unlikely(!ctx->spe_enabled)) { \
7971 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7972 return; \
7974 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
7975 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
7976 TCGv_i32 t2 = tcg_temp_local_new_i32(); \
7977 TCGv_i64 t3 = tcg_temp_local_new_i64(); \
7978 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
7979 tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
7980 tcg_op(t0, t0, t2); \
7981 tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
7982 tcg_gen_trunc_i64_i32(t1, t3); \
7983 tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
7984 tcg_gen_trunc_i64_i32(t2, t3); \
7985 tcg_temp_free_i64(t3); \
7986 tcg_op(t1, t1, t2); \
7987 tcg_temp_free_i32(t2); \
7988 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
7989 tcg_temp_free_i32(t0); \
7990 tcg_temp_free_i32(t1); \
7992 #else
7993 #define GEN_SPEOP_ARITH2(name, tcg_op) \
7994 static inline void gen_##name(DisasContext *ctx) \
7996 if (unlikely(!ctx->spe_enabled)) { \
7997 gen_exception(ctx, POWERPC_EXCP_SPEU); \
7998 return; \
8000 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
8001 cpu_gpr[rB(ctx->opcode)]); \
8002 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
8003 cpu_gprh[rB(ctx->opcode)]); \
8005 #endif
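/* The word shift helpers below take a 6-bit shift count from rB: counts of
 * 32..63 architecturally yield zero, hence the mask with 0x3F and the
 * comparison against 32.  The rotate (evrlw) only needs the low 5 bits. */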
8007 static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
8009 TCGv_i32 t0;
8010 int l1, l2;
8012 l1 = gen_new_label();
8013 l2 = gen_new_label();
8014 t0 = tcg_temp_local_new_i32();
8015 /* No error here: 6 bits are used */
8016 tcg_gen_andi_i32(t0, arg2, 0x3F);
8017 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
8018 tcg_gen_shr_i32(ret, arg1, t0);
8019 tcg_gen_br(l2);
8020 gen_set_label(l1);
8021 tcg_gen_movi_i32(ret, 0);
8022 gen_set_label(l2);
8023 tcg_temp_free_i32(t0);
8025 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
8026 static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
8028 TCGv_i32 t0;
8029 int l1, l2;
8031 l1 = gen_new_label();
8032 l2 = gen_new_label();
8033 t0 = tcg_temp_local_new_i32();
8034 /* No error here: 6 bits are used */
8035 tcg_gen_andi_i32(t0, arg2, 0x3F);
8036 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
8037 tcg_gen_sar_i32(ret, arg1, t0);
8038 tcg_gen_br(l2);
8039 gen_set_label(l1);
8040 tcg_gen_movi_i32(ret, 0);
8041 gen_set_label(l2);
8042 tcg_temp_free_i32(t0);
8044 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
8045 static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
8047 TCGv_i32 t0;
8048 int l1, l2;
8050 l1 = gen_new_label();
8051 l2 = gen_new_label();
8052 t0 = tcg_temp_local_new_i32();
8053 /* No error here: 6 bits are used */
8054 tcg_gen_andi_i32(t0, arg2, 0x3F);
8055 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
8056 tcg_gen_shl_i32(ret, arg1, t0);
8057 tcg_gen_br(l2);
8058 gen_set_label(l1);
8059 tcg_gen_movi_i32(ret, 0);
8060 gen_set_label(l2);
8061 tcg_temp_free_i32(t0);
8063 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
8064 static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
8066 TCGv_i32 t0 = tcg_temp_new_i32();
8067 tcg_gen_andi_i32(t0, arg2, 0x1F);
8068 tcg_gen_rotl_i32(ret, arg1, t0);
8069 tcg_temp_free_i32(t0);
8071 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
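/* evmergehi: rD_hi := rA_hi, rD_lo := rB_hi.  On 64-bit targets this keeps
 * the upper 32 bits of rA and shifts the upper 32 bits of rB down. */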
8072 static inline void gen_evmergehi(DisasContext *ctx)
8074 if (unlikely(!ctx->spe_enabled)) {
8075 gen_exception(ctx, POWERPC_EXCP_SPEU);
8076 return;
8078 #if defined(TARGET_PPC64)
8079 TCGv t0 = tcg_temp_new();
8080 TCGv t1 = tcg_temp_new();
8081 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
8083 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
8083 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
8084 tcg_temp_free(t0);
8085 tcg_temp_free(t1);
8086 #else
8087 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
8088 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
8089 #endif
8091 GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
8092 static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
8094 tcg_gen_sub_i32(ret, arg2, arg1);
8096 GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
8098 /* SPE arithmetic immediate */
8099 #if defined(TARGET_PPC64)
8100 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
8101 static inline void gen_##name(DisasContext *ctx) \
8103 if (unlikely(!ctx->spe_enabled)) { \
8104 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8105 return; \
8107 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
8108 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
8109 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
8110 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
8111 tcg_op(t0, t0, rA(ctx->opcode)); \
8112 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
8113 tcg_gen_trunc_i64_i32(t1, t2); \
8114 tcg_temp_free_i64(t2); \
8115 tcg_op(t1, t1, rA(ctx->opcode)); \
8116 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
8117 tcg_temp_free_i32(t0); \
8118 tcg_temp_free_i32(t1); \
8120 #else
8121 #define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
8122 static inline void gen_##name(DisasContext *ctx) \
8124 if (unlikely(!ctx->spe_enabled)) { \
8125 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8126 return; \
8128 tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
8129 rA(ctx->opcode)); \
8130 tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
8131 rA(ctx->opcode)); \
8133 #endif
8134 GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
8135 GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
8137 /* SPE comparison */
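/* The vector compares set CRF_CL when the low-word comparison holds and
 * CRF_CH when the high-word comparison holds; CRF_CH_OR_CL and
 * CRF_CH_AND_CL summarize the two results. */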
8138 #if defined(TARGET_PPC64)
8139 #define GEN_SPEOP_COMP(name, tcg_cond) \
8140 static inline void gen_##name(DisasContext *ctx) \
8142 if (unlikely(!ctx->spe_enabled)) { \
8143 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8144 return; \
8146 int l1 = gen_new_label(); \
8147 int l2 = gen_new_label(); \
8148 int l3 = gen_new_label(); \
8149 int l4 = gen_new_label(); \
8150 TCGv_i32 t0 = tcg_temp_local_new_i32(); \
8151 TCGv_i32 t1 = tcg_temp_local_new_i32(); \
8152 TCGv_i64 t2 = tcg_temp_local_new_i64(); \
8153 tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
8154 tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
8155 tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
8156 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
8157 tcg_gen_br(l2); \
8158 gen_set_label(l1); \
8159 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
8160 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
8161 gen_set_label(l2); \
8162 tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
8163 tcg_gen_trunc_i64_i32(t0, t2); \
8164 tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
8165 tcg_gen_trunc_i64_i32(t1, t2); \
8166 tcg_temp_free_i64(t2); \
8167 tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
8168 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
8169 ~(CRF_CH | CRF_CH_AND_CL)); \
8170 tcg_gen_br(l4); \
8171 gen_set_label(l3); \
8172 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
8173 CRF_CH | CRF_CH_OR_CL); \
8174 gen_set_label(l4); \
8175 tcg_temp_free_i32(t0); \
8176 tcg_temp_free_i32(t1); \
8178 #else
8179 #define GEN_SPEOP_COMP(name, tcg_cond) \
8180 static inline void gen_##name(DisasContext *ctx) \
8182 if (unlikely(!ctx->spe_enabled)) { \
8183 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8184 return; \
8186 int l1 = gen_new_label(); \
8187 int l2 = gen_new_label(); \
8188 int l3 = gen_new_label(); \
8189 int l4 = gen_new_label(); \
8191 tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
8192 cpu_gpr[rB(ctx->opcode)], l1); \
8193 tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
8194 tcg_gen_br(l2); \
8195 gen_set_label(l1); \
8196 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
8197 CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
8198 gen_set_label(l2); \
8199 tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
8200 cpu_gprh[rB(ctx->opcode)], l3); \
8201 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
8202 ~(CRF_CH | CRF_CH_AND_CL)); \
8203 tcg_gen_br(l4); \
8204 gen_set_label(l3); \
8205 tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
8206 CRF_CH | CRF_CH_OR_CL); \
8207 gen_set_label(l4); \
8209 #endif
8210 GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
8211 GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
8212 GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
8213 GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
8214 GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
8216 /* SPE misc */
8217 static inline void gen_brinc(DisasContext *ctx)
8219 /* Note: brinc is usable even if SPE is disabled */
8220 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
8221 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
8223 static inline void gen_evmergelo(DisasContext *ctx)
8225 if (unlikely(!ctx->spe_enabled)) {
8226 gen_exception(ctx, POWERPC_EXCP_SPEU);
8227 return;
8229 #if defined(TARGET_PPC64)
8230 TCGv t0 = tcg_temp_new();
8231 TCGv t1 = tcg_temp_new();
8232 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
8233 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
8234 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
8235 tcg_temp_free(t0);
8236 tcg_temp_free(t1);
8237 #else
8238 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8239 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
8240 #endif
8242 static inline void gen_evmergehilo(DisasContext *ctx)
8244 if (unlikely(!ctx->spe_enabled)) {
8245 gen_exception(ctx, POWERPC_EXCP_SPEU);
8246 return;
8248 #if defined(TARGET_PPC64)
8249 TCGv t0 = tcg_temp_new();
8250 TCGv t1 = tcg_temp_new();
8251 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
8252 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
8253 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
8254 tcg_temp_free(t0);
8255 tcg_temp_free(t1);
8256 #else
8257 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
8258 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
8259 #endif
8261 static inline void gen_evmergelohi(DisasContext *ctx)
8263 if (unlikely(!ctx->spe_enabled)) {
8264 gen_exception(ctx, POWERPC_EXCP_SPEU);
8265 return;
8267 #if defined(TARGET_PPC64)
8268 TCGv t0 = tcg_temp_new();
8269 TCGv t1 = tcg_temp_new();
8270 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
8271 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
8272 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
8273 tcg_temp_free(t0);
8274 tcg_temp_free(t1);
8275 #else
8276 if (rD(ctx->opcode) == rA(ctx->opcode)) {
8277 TCGv_i32 tmp = tcg_temp_new_i32();
8278 tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
8279 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
8280 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
8281 tcg_temp_free_i32(tmp);
8282 } else {
8283 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
8284 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8286 #endif
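/* evsplati sign-extends the 5-bit IMM field and replicates it into both
 * words of rD; evsplatfi instead places the raw field in the top 5 bits of
 * each word (the "fractional" form). */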
8288 static inline void gen_evsplati(DisasContext *ctx)
8290 uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;
8292 #if defined(TARGET_PPC64)
8293 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
8294 #else
8295 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
8296 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
8297 #endif
8299 static inline void gen_evsplatfi(DisasContext *ctx)
8301 uint64_t imm = rA(ctx->opcode) << 27;
8303 #if defined(TARGET_PPC64)
8304 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
8305 #else
8306 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
8307 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
8308 #endif
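/* evsel: the most significant bit of the selected CR field picks the high
 * word (rA if set, rB if clear) and the next bit picks the low word.  The
 * gen_evsel0..3 wrappers appear to exist only to give the overlapping
 * opcode encodings their own table entries. */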
8311 static inline void gen_evsel(DisasContext *ctx)
8313 int l1 = gen_new_label();
8314 int l2 = gen_new_label();
8315 int l3 = gen_new_label();
8316 int l4 = gen_new_label();
8317 TCGv_i32 t0 = tcg_temp_local_new_i32();
8318 #if defined(TARGET_PPC64)
8319 TCGv t1 = tcg_temp_local_new();
8320 TCGv t2 = tcg_temp_local_new();
8321 #endif
8322 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
8323 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
8324 #if defined(TARGET_PPC64)
8325 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
8326 #else
8327 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
8328 #endif
8329 tcg_gen_br(l2);
8330 gen_set_label(l1);
8331 #if defined(TARGET_PPC64)
8332 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
8333 #else
8334 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
8335 #endif
8336 gen_set_label(l2);
8337 tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
8338 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
8339 #if defined(TARGET_PPC64)
8340 tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
8341 #else
8342 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
8343 #endif
8344 tcg_gen_br(l4);
8345 gen_set_label(l3);
8346 #if defined(TARGET_PPC64)
8347 tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
8348 #else
8349 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
8350 #endif
8351 gen_set_label(l4);
8352 tcg_temp_free_i32(t0);
8353 #if defined(TARGET_PPC64)
8354 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
8355 tcg_temp_free(t1);
8356 tcg_temp_free(t2);
8357 #endif
8360 static void gen_evsel0(DisasContext *ctx)
8362 gen_evsel(ctx);
8365 static void gen_evsel1(DisasContext *ctx)
8367 gen_evsel(ctx);
8370 static void gen_evsel2(DisasContext *ctx)
8372 gen_evsel(ctx);
8375 static void gen_evsel3(DisasContext *ctx)
8377 gen_evsel(ctx);
8380 /* Multiply */
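/* evmwumi multiplies the low words of rA and rB as unsigned 32-bit values
 * into a full 64-bit rD; evmwsmi is the signed variant.  The 'a' suffix also
 * copies the result into the SPE accumulator, and 'aa' adds the product to
 * the accumulator and writes the sum to both the accumulator and rD. */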
8382 static inline void gen_evmwumi(DisasContext *ctx)
8384 TCGv_i64 t0, t1;
8386 if (unlikely(!ctx->spe_enabled)) {
8387 gen_exception(ctx, POWERPC_EXCP_SPEU);
8388 return;
8391 t0 = tcg_temp_new_i64();
8392 t1 = tcg_temp_new_i64();
8394 /* t0 := rA_lo (zero-extended); t1 := rB_lo (zero-extended) */
8395 #if defined(TARGET_PPC64)
8396 tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
8397 tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
8398 #else
8399 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
8400 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
8401 #endif
8403 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
8405 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
8407 tcg_temp_free_i64(t0);
8408 tcg_temp_free_i64(t1);
8411 static inline void gen_evmwumia(DisasContext *ctx)
8413 TCGv_i64 tmp;
8415 if (unlikely(!ctx->spe_enabled)) {
8416 gen_exception(ctx, POWERPC_EXCP_SPEU);
8417 return;
8420 gen_evmwumi(ctx); /* rD := rA * rB */
8422 tmp = tcg_temp_new_i64();
8424 /* acc := rD */
8425 gen_load_gpr64(tmp, rD(ctx->opcode));
8426 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
8427 tcg_temp_free_i64(tmp);
8430 static inline void gen_evmwumiaa(DisasContext *ctx)
8432 TCGv_i64 acc;
8433 TCGv_i64 tmp;
8435 if (unlikely(!ctx->spe_enabled)) {
8436 gen_exception(ctx, POWERPC_EXCP_SPEU);
8437 return;
8440 gen_evmwumi(ctx); /* rD := rA * rB */
8442 acc = tcg_temp_new_i64();
8443 tmp = tcg_temp_new_i64();
8445 /* tmp := rD */
8446 gen_load_gpr64(tmp, rD(ctx->opcode));
8448 /* Load acc */
8449 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
8451 /* acc := tmp + acc */
8452 tcg_gen_add_i64(acc, acc, tmp);
8454 /* Store acc */
8455 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
8457 /* rD := acc */
8458 gen_store_gpr64(rD(ctx->opcode), acc);
8460 tcg_temp_free_i64(acc);
8461 tcg_temp_free_i64(tmp);
8464 static inline void gen_evmwsmi(DisasContext *ctx)
8466 TCGv_i64 t0, t1;
8468 if (unlikely(!ctx->spe_enabled)) {
8469 gen_exception(ctx, POWERPC_EXCP_SPEU);
8470 return;
8473 t0 = tcg_temp_new_i64();
8474 t1 = tcg_temp_new_i64();
8476 /* t0 := rA_lo (sign-extended); t1 := rB_lo (sign-extended) */
8477 #if defined(TARGET_PPC64)
8478 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
8479 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
8480 #else
8481 tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
8482 tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
8483 #endif
8485 tcg_gen_mul_i64(t0, t0, t1); /* t0 := rA * rB */
8487 gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */
8489 tcg_temp_free_i64(t0);
8490 tcg_temp_free_i64(t1);
8493 static inline void gen_evmwsmia(DisasContext *ctx)
8495 TCGv_i64 tmp;
8497 gen_evmwsmi(ctx); /* rD := rA * rB */
8499 tmp = tcg_temp_new_i64();
8501 /* acc := rD */
8502 gen_load_gpr64(tmp, rD(ctx->opcode));
8503 tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
8505 tcg_temp_free_i64(tmp);
8508 static inline void gen_evmwsmiaa(DisasContext *ctx)
8510 TCGv_i64 acc;
8511 TCGv_i64 tmp;
8513 gen_evmwsmi(ctx); /* rD := rA * rB */
8515 acc = tcg_temp_new_i64();
8516 tmp = tcg_temp_new_i64();
8518 /* tmp := rD */
8519 gen_load_gpr64(tmp, rD(ctx->opcode));
8521 /* Load acc */
8522 tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
8524 /* acc := tmp + acc */
8525 tcg_gen_add_i64(acc, acc, tmp);
8527 /* Store acc */
8528 tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));
8530 /* rD := acc */
8531 gen_store_gpr64(rD(ctx->opcode), acc);
8533 tcg_temp_free_i64(acc);
8534 tcg_temp_free_i64(tmp);
8537 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
8538 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
8539 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
8540 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
8541 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
8542 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
8543 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
8544 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE); //
8545 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE);
8546 GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
8547 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
8548 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
8549 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
8550 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
8551 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
8552 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE);
8553 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
8554 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
8555 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
8556 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE);
8557 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
8558 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
8559 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE); //
8560 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE);
8561 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
8562 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
8563 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
8564 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
8565 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE); ////
8567 /* SPE load and stores */
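/* The immediate form scales the 5-bit UIMM by the access size: sh below is
 * log2 of the granule, so doubleword ops use UIMM*8, word ops UIMM*4 and
 * halfword ops UIMM*2.  In narrow (32-bit) mode the EA is truncated. */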
8568 static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
8570 target_ulong uimm = rB(ctx->opcode);
8572 if (rA(ctx->opcode) == 0) {
8573 tcg_gen_movi_tl(EA, uimm << sh);
8574 } else {
8575 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
8576 if (NARROW_MODE(ctx)) {
8577 tcg_gen_ext32u_tl(EA, EA);
8582 static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
8584 #if defined(TARGET_PPC64)
8585 gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
8586 #else
8587 TCGv_i64 t0 = tcg_temp_new_i64();
8588 gen_qemu_ld64(ctx, t0, addr);
8589 tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
8590 tcg_gen_shri_i64(t0, t0, 32);
8591 tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
8592 tcg_temp_free_i64(t0);
8593 #endif
8596 static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
8598 #if defined(TARGET_PPC64)
8599 TCGv t0 = tcg_temp_new();
8600 gen_qemu_ld32u(ctx, t0, addr);
8601 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
8602 gen_addr_add(ctx, addr, addr, 4);
8603 gen_qemu_ld32u(ctx, t0, addr);
8604 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8605 tcg_temp_free(t0);
8606 #else
8607 gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
8608 gen_addr_add(ctx, addr, addr, 4);
8609 gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
8610 #endif
8613 static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
8615 TCGv t0 = tcg_temp_new();
8616 #if defined(TARGET_PPC64)
8617 gen_qemu_ld16u(ctx, t0, addr);
8618 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
8619 gen_addr_add(ctx, addr, addr, 2);
8620 gen_qemu_ld16u(ctx, t0, addr);
8621 tcg_gen_shli_tl(t0, t0, 32);
8622 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8623 gen_addr_add(ctx, addr, addr, 2);
8624 gen_qemu_ld16u(ctx, t0, addr);
8625 tcg_gen_shli_tl(t0, t0, 16);
8626 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8627 gen_addr_add(ctx, addr, addr, 2);
8628 gen_qemu_ld16u(ctx, t0, addr);
8629 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8630 #else
8631 gen_qemu_ld16u(ctx, t0, addr);
8632 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
8633 gen_addr_add(ctx, addr, addr, 2);
8634 gen_qemu_ld16u(ctx, t0, addr);
8635 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
8636 gen_addr_add(ctx, addr, addr, 2);
8637 gen_qemu_ld16u(ctx, t0, addr);
8638 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
8639 gen_addr_add(ctx, addr, addr, 2);
8640 gen_qemu_ld16u(ctx, t0, addr);
8641 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8642 #endif
8643 tcg_temp_free(t0);
8646 static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
8648 TCGv t0 = tcg_temp_new();
8649 gen_qemu_ld16u(ctx, t0, addr);
8650 #if defined(TARGET_PPC64)
8651 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
8652 tcg_gen_shli_tl(t0, t0, 16);
8653 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8654 #else
8655 tcg_gen_shli_tl(t0, t0, 16);
8656 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
8657 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
8658 #endif
8659 tcg_temp_free(t0);
8662 static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
8664 TCGv t0 = tcg_temp_new();
8665 gen_qemu_ld16u(ctx, t0, addr);
8666 #if defined(TARGET_PPC64)
8667 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
8668 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8669 #else
8670 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
8671 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
8672 #endif
8673 tcg_temp_free(t0);
8676 static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
8678 TCGv t0 = tcg_temp_new();
8679 gen_qemu_ld16s(ctx, t0, addr);
8680 #if defined(TARGET_PPC64)
8681 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
8682 tcg_gen_ext32u_tl(t0, t0);
8683 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8684 #else
8685 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
8686 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
8687 #endif
8688 tcg_temp_free(t0);
8691 static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
8693 TCGv t0 = tcg_temp_new();
8694 #if defined(TARGET_PPC64)
8695 gen_qemu_ld16u(ctx, t0, addr);
8696 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
8697 gen_addr_add(ctx, addr, addr, 2);
8698 gen_qemu_ld16u(ctx, t0, addr);
8699 tcg_gen_shli_tl(t0, t0, 16);
8700 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8701 #else
8702 gen_qemu_ld16u(ctx, t0, addr);
8703 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
8704 gen_addr_add(ctx, addr, addr, 2);
8705 gen_qemu_ld16u(ctx, t0, addr);
8706 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
8707 #endif
8708 tcg_temp_free(t0);
8711 static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
8713 #if defined(TARGET_PPC64)
8714 TCGv t0 = tcg_temp_new();
8715 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
8716 gen_addr_add(ctx, addr, addr, 2);
8717 gen_qemu_ld16u(ctx, t0, addr);
8718 tcg_gen_shli_tl(t0, t0, 32);
8719 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8720 tcg_temp_free(t0);
8721 #else
8722 gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
8723 gen_addr_add(ctx, addr, addr, 2);
8724 gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
8725 #endif
8728 static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
8730 #if defined(TARGET_PPC64)
8731 TCGv t0 = tcg_temp_new();
8732 gen_qemu_ld16s(ctx, t0, addr);
8733 tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
8734 gen_addr_add(ctx, addr, addr, 2);
8735 gen_qemu_ld16s(ctx, t0, addr);
8736 tcg_gen_shli_tl(t0, t0, 32);
8737 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8738 tcg_temp_free(t0);
8739 #else
8740 gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
8741 gen_addr_add(ctx, addr, addr, 2);
8742 gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
8743 #endif
8746 static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
8748 TCGv t0 = tcg_temp_new();
8749 gen_qemu_ld32u(ctx, t0, addr);
8750 #if defined(TARGET_PPC64)
8751 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
8752 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8753 #else
8754 tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
8755 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
8756 #endif
8757 tcg_temp_free(t0);
8760 static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
8762 TCGv t0 = tcg_temp_new();
8763 #if defined(TARGET_PPC64)
8764 gen_qemu_ld16u(ctx, t0, addr);
8765 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
8766 tcg_gen_shli_tl(t0, t0, 32);
8767 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8768 gen_addr_add(ctx, addr, addr, 2);
8769 gen_qemu_ld16u(ctx, t0, addr);
8770 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8771 tcg_gen_shli_tl(t0, t0, 16);
8772 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8773 #else
8774 gen_qemu_ld16u(ctx, t0, addr);
8775 tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
8776 tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
8777 gen_addr_add(ctx, addr, addr, 2);
8778 gen_qemu_ld16u(ctx, t0, addr);
8779 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
8780 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
8781 #endif
8782 tcg_temp_free(t0);
8785 static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
8787 #if defined(TARGET_PPC64)
8788 gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
8789 #else
8790 TCGv_i64 t0 = tcg_temp_new_i64();
8791 tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
8792 gen_qemu_st64(ctx, t0, addr);
8793 tcg_temp_free_i64(t0);
8794 #endif
8797 static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
8799 #if defined(TARGET_PPC64)
8800 TCGv t0 = tcg_temp_new();
8801 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
8802 gen_qemu_st32(ctx, t0, addr);
8803 tcg_temp_free(t0);
8804 #else
8805 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
8806 #endif
8807 gen_addr_add(ctx, addr, addr, 4);
8808 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
8811 static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
8813 TCGv t0 = tcg_temp_new();
8814 #if defined(TARGET_PPC64)
8815 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
8816 #else
8817 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
8818 #endif
8819 gen_qemu_st16(ctx, t0, addr);
8820 gen_addr_add(ctx, addr, addr, 2);
8821 #if defined(TARGET_PPC64)
8822 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
8823 gen_qemu_st16(ctx, t0, addr);
8824 #else
8825 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
8826 #endif
8827 gen_addr_add(ctx, addr, addr, 2);
8828 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
8829 gen_qemu_st16(ctx, t0, addr);
8830 tcg_temp_free(t0);
8831 gen_addr_add(ctx, addr, addr, 2);
8832 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
8835 static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
8837 TCGv t0 = tcg_temp_new();
8838 #if defined(TARGET_PPC64)
8839 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
8840 #else
8841 tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
8842 #endif
8843 gen_qemu_st16(ctx, t0, addr);
8844 gen_addr_add(ctx, addr, addr, 2);
8845 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
8846 gen_qemu_st16(ctx, t0, addr);
8847 tcg_temp_free(t0);
8850 static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
8852 #if defined(TARGET_PPC64)
8853 TCGv t0 = tcg_temp_new();
8854 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
8855 gen_qemu_st16(ctx, t0, addr);
8856 tcg_temp_free(t0);
8857 #else
8858 gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
8859 #endif
8860 gen_addr_add(ctx, addr, addr, 2);
8861 gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
8864 static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
8866 #if defined(TARGET_PPC64)
8867 TCGv t0 = tcg_temp_new();
8868 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
8869 gen_qemu_st32(ctx, t0, addr);
8870 tcg_temp_free(t0);
8871 #else
8872 gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
8873 #endif
8876 static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
8878 gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
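/* GEN_SPEOP_LDST builds the public handler around the gen_op_* bodies above:
 * raise SPEU when SPE is disabled, set the access type, compute the EA
 * (scaled-immediate form when Rc is set, register-indexed otherwise) and
 * dispatch to the body. */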
8881 #define GEN_SPEOP_LDST(name, opc2, sh) \
8882 static void glue(gen_, name)(DisasContext *ctx) \
8884 TCGv t0; \
8885 if (unlikely(!ctx->spe_enabled)) { \
8886 gen_exception(ctx, POWERPC_EXCP_SPEU); \
8887 return; \
8889 gen_set_access_type(ctx, ACCESS_INT); \
8890 t0 = tcg_temp_new(); \
8891 if (Rc(ctx->opcode)) { \
8892 gen_addr_spe_imm_index(ctx, t0, sh); \
8893 } else { \
8894 gen_addr_reg_index(ctx, t0); \
8896 gen_op_##name(ctx, t0); \
8897 tcg_temp_free(t0); \
8900 GEN_SPEOP_LDST(evldd, 0x00, 3);
8901 GEN_SPEOP_LDST(evldw, 0x01, 3);
8902 GEN_SPEOP_LDST(evldh, 0x02, 3);
8903 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
8904 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
8905 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
8906 GEN_SPEOP_LDST(evlwhe, 0x08, 2);
8907 GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
8908 GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
8909 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
8910 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);
8912 GEN_SPEOP_LDST(evstdd, 0x10, 3);
8913 GEN_SPEOP_LDST(evstdw, 0x11, 3);
8914 GEN_SPEOP_LDST(evstdh, 0x12, 3);
8915 GEN_SPEOP_LDST(evstwhe, 0x18, 2);
8916 GEN_SPEOP_LDST(evstwho, 0x1A, 2);
8917 GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
8918 GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
8920 /* Multiply and add - TODO */
8921 #if 0
8922 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);//
8923 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8924 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8925 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8926 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8927 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8928 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8929 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8930 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8931 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8932 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, 0x00000000, PPC_SPE);
8933 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8935 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8936 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
8937 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, 0x00000000, PPC_SPE);
8938 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8939 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8940 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8941 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8942 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
8943 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, 0x00000000, PPC_SPE);
8944 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8945 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8946 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8948 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8949 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8950 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8951 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
8952 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, 0x00000000, PPC_SPE);
8954 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8955 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8956 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8957 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8958 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8959 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8960 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8961 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8962 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8963 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8964 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, 0x00000000, PPC_SPE);
8965 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8967 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, 0x00000000, PPC_SPE);
8968 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, 0x00000000, PPC_SPE);
8969 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8970 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8972 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8973 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8974 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8975 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8976 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8977 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8978 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8979 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8980 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8981 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8982 GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, 0x00000000, PPC_SPE);
8983 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8985 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8986 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8987 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8988 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, 0x00000000, PPC_SPE);
8989 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
8990 #endif
8992 /*** SPE floating-point extension ***/
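/* The GEN_SPEFPUOP_* macros are named <result width>_<operand width>.  On
 * 64-bit targets a 32-bit result is merged into the low word of rD while the
 * high word is preserved; on 32-bit targets 64-bit operands are assembled
 * and scattered with gen_load_gpr64/gen_store_gpr64. */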
8993 #if defined(TARGET_PPC64)
8994 #define GEN_SPEFPUOP_CONV_32_32(name) \
8995 static inline void gen_##name(DisasContext *ctx) \
8997 TCGv_i32 t0; \
8998 TCGv t1; \
8999 t0 = tcg_temp_new_i32(); \
9000 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
9001 gen_helper_##name(t0, cpu_env, t0); \
9002 t1 = tcg_temp_new(); \
9003 tcg_gen_extu_i32_tl(t1, t0); \
9004 tcg_temp_free_i32(t0); \
9005 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
9006 0xFFFFFFFF00000000ULL); \
9007 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
9008 tcg_temp_free(t1); \
9010 #define GEN_SPEFPUOP_CONV_32_64(name) \
9011 static inline void gen_##name(DisasContext *ctx) \
9013 TCGv_i32 t0; \
9014 TCGv t1; \
9015 t0 = tcg_temp_new_i32(); \
9016 gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]); \
9017 t1 = tcg_temp_new(); \
9018 tcg_gen_extu_i32_tl(t1, t0); \
9019 tcg_temp_free_i32(t0); \
9020 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
9021 0xFFFFFFFF00000000ULL); \
9022 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
9023 tcg_temp_free(t1); \
9025 #define GEN_SPEFPUOP_CONV_64_32(name) \
9026 static inline void gen_##name(DisasContext *ctx) \
9028 TCGv_i32 t0 = tcg_temp_new_i32(); \
9029 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
9030 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); \
9031 tcg_temp_free_i32(t0); \
9033 #define GEN_SPEFPUOP_CONV_64_64(name) \
9034 static inline void gen_##name(DisasContext *ctx) \
9036 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
9037 cpu_gpr[rB(ctx->opcode)]); \
9039 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
9040 static inline void gen_##name(DisasContext *ctx) \
9042 TCGv_i32 t0, t1; \
9043 TCGv_i64 t2; \
9044 if (unlikely(!ctx->spe_enabled)) { \
9045 gen_exception(ctx, POWERPC_EXCP_SPEU); \
9046 return; \
9048 t0 = tcg_temp_new_i32(); \
9049 t1 = tcg_temp_new_i32(); \
9050 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
9051 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
9052 gen_helper_##name(t0, cpu_env, t0, t1); \
9053 tcg_temp_free_i32(t1); \
9054 t2 = tcg_temp_new(); \
9055 tcg_gen_extu_i32_tl(t2, t0); \
9056 tcg_temp_free_i32(t0); \
9057 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
9058 0xFFFFFFFF00000000ULL); \
9059 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
9060 tcg_temp_free(t2); \
9062 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
9063 static inline void gen_##name(DisasContext *ctx) \
9065 if (unlikely(!ctx->spe_enabled)) { \
9066 gen_exception(ctx, POWERPC_EXCP_SPEU); \
9067 return; \
9069 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
9070 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
9072 #define GEN_SPEFPUOP_COMP_32(name) \
9073 static inline void gen_##name(DisasContext *ctx) \
9075 TCGv_i32 t0, t1; \
9076 if (unlikely(!ctx->spe_enabled)) { \
9077 gen_exception(ctx, POWERPC_EXCP_SPEU); \
9078 return; \
9080 t0 = tcg_temp_new_i32(); \
9081 t1 = tcg_temp_new_i32(); \
9082 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
9083 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
9084 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
9085 tcg_temp_free_i32(t0); \
9086 tcg_temp_free_i32(t1); \
9088 #define GEN_SPEFPUOP_COMP_64(name) \
9089 static inline void gen_##name(DisasContext *ctx) \
9091 if (unlikely(!ctx->spe_enabled)) { \
9092 gen_exception(ctx, POWERPC_EXCP_SPEU); \
9093 return; \
9095 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, \
9096 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
9098 #else
9099 #define GEN_SPEFPUOP_CONV_32_32(name) \
9100 static inline void gen_##name(DisasContext *ctx) \
9102 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
9103 cpu_gpr[rB(ctx->opcode)]); \
9105 #define GEN_SPEFPUOP_CONV_32_64(name) \
9106 static inline void gen_##name(DisasContext *ctx) \
9108 TCGv_i64 t0 = tcg_temp_new_i64(); \
9109 gen_load_gpr64(t0, rB(ctx->opcode)); \
9110 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); \
9111 tcg_temp_free_i64(t0); \
9113 #define GEN_SPEFPUOP_CONV_64_32(name) \
9114 static inline void gen_##name(DisasContext *ctx) \
9116 TCGv_i64 t0 = tcg_temp_new_i64(); \
9117 gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]); \
9118 gen_store_gpr64(rD(ctx->opcode), t0); \
9119 tcg_temp_free_i64(t0); \
9121 #define GEN_SPEFPUOP_CONV_64_64(name) \
9122 static inline void gen_##name(DisasContext *ctx) \
9124 TCGv_i64 t0 = tcg_temp_new_i64(); \
9125 gen_load_gpr64(t0, rB(ctx->opcode)); \
9126 gen_helper_##name(t0, cpu_env, t0); \
9127 gen_store_gpr64(rD(ctx->opcode), t0); \
9128 tcg_temp_free_i64(t0); \
9130 #define GEN_SPEFPUOP_ARITH2_32_32(name) \
9131 static inline void gen_##name(DisasContext *ctx) \
9133 if (unlikely(!ctx->spe_enabled)) { \
9134 gen_exception(ctx, POWERPC_EXCP_SPEU); \
9135 return; \
9137 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
9138 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
9140 #define GEN_SPEFPUOP_ARITH2_64_64(name) \
9141 static inline void gen_##name(DisasContext *ctx) \
9143 TCGv_i64 t0, t1; \
9144 if (unlikely(!ctx->spe_enabled)) { \
9145 gen_exception(ctx, POWERPC_EXCP_SPEU); \
9146 return; \
9148 t0 = tcg_temp_new_i64(); \
9149 t1 = tcg_temp_new_i64(); \
9150 gen_load_gpr64(t0, rA(ctx->opcode)); \
9151 gen_load_gpr64(t1, rB(ctx->opcode)); \
9152 gen_helper_##name(t0, cpu_env, t0, t1); \
9153 gen_store_gpr64(rD(ctx->opcode), t0); \
9154 tcg_temp_free_i64(t0); \
9155 tcg_temp_free_i64(t1); \
9157 #define GEN_SPEFPUOP_COMP_32(name) \
9158 static inline void gen_##name(DisasContext *ctx) \
9160 if (unlikely(!ctx->spe_enabled)) { \
9161 gen_exception(ctx, POWERPC_EXCP_SPEU); \
9162 return; \
9164 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, \
9165 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
9167 #define GEN_SPEFPUOP_COMP_64(name) \
9168 static inline void gen_##name(DisasContext *ctx) \
9170 TCGv_i64 t0, t1; \
9171 if (unlikely(!ctx->spe_enabled)) { \
9172 gen_exception(ctx, POWERPC_EXCP_SPEU); \
9173 return; \
9175 t0 = tcg_temp_new_i64(); \
9176 t1 = tcg_temp_new_i64(); \
9177 gen_load_gpr64(t0, rA(ctx->opcode)); \
9178 gen_load_gpr64(t1, rB(ctx->opcode)); \
9179 gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
9180 tcg_temp_free_i64(t0); \
9181 tcg_temp_free_i64(t1); \
9183 #endif
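/* Naming: the evfs* ops treat rA/rB as a pair of single-precision values and
 * operate on both words, the efs* ops work on a single-precision value in
 * the low word only, and the efd* ops work on a 64-bit double. */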
9185 /* Single precision floating-point vectors operations */
9186 /* Arithmetic */
9187 GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
9188 GEN_SPEFPUOP_ARITH2_64_64(evfssub);
9189 GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
9190 GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
9191 static inline void gen_evfsabs(DisasContext *ctx)
9193 if (unlikely(!ctx->spe_enabled)) {
9194 gen_exception(ctx, POWERPC_EXCP_SPEU);
9195 return;
9197 #if defined(TARGET_PPC64)
9198 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000080000000LL);
9199 #else
9200 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x80000000);
9201 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
9202 #endif
9204 static inline void gen_evfsnabs(DisasContext *ctx)
9206 if (unlikely(!ctx->spe_enabled)) {
9207 gen_exception(ctx, POWERPC_EXCP_SPEU);
9208 return;
9210 #if defined(TARGET_PPC64)
9211 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
9212 #else
9213 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
9214 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
9215 #endif
9217 static inline void gen_evfsneg(DisasContext *ctx)
9219 if (unlikely(!ctx->spe_enabled)) {
9220 gen_exception(ctx, POWERPC_EXCP_SPEU);
9221 return;
9223 #if defined(TARGET_PPC64)
9224 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000080000000LL);
9225 #else
9226 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
9227 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
9228 #endif
9231 /* Conversion */
9232 GEN_SPEFPUOP_CONV_64_64(evfscfui);
9233 GEN_SPEFPUOP_CONV_64_64(evfscfsi);
9234 GEN_SPEFPUOP_CONV_64_64(evfscfuf);
9235 GEN_SPEFPUOP_CONV_64_64(evfscfsf);
9236 GEN_SPEFPUOP_CONV_64_64(evfsctui);
9237 GEN_SPEFPUOP_CONV_64_64(evfsctsi);
9238 GEN_SPEFPUOP_CONV_64_64(evfsctuf);
9239 GEN_SPEFPUOP_CONV_64_64(evfsctsf);
9240 GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
9241 GEN_SPEFPUOP_CONV_64_64(evfsctsiz);
9243 /* Comparison */
9244 GEN_SPEFPUOP_COMP_64(evfscmpgt);
9245 GEN_SPEFPUOP_COMP_64(evfscmplt);
9246 GEN_SPEFPUOP_COMP_64(evfscmpeq);
9247 GEN_SPEFPUOP_COMP_64(evfststgt);
9248 GEN_SPEFPUOP_COMP_64(evfststlt);
9249 GEN_SPEFPUOP_COMP_64(evfststeq);
9251 /* Opcodes definitions */
9252 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
9253 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
9254 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
9255 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
9256 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
9257 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
9258 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
9259 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
9260 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
9261 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
9262 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
9263 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
9264 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
9265 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
9267 /* Single precision floating-point operations */
9268 /* Arithmetic */
9269 GEN_SPEFPUOP_ARITH2_32_32(efsadd);
9270 GEN_SPEFPUOP_ARITH2_32_32(efssub);
9271 GEN_SPEFPUOP_ARITH2_32_32(efsmul);
9272 GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
9273 static inline void gen_efsabs(DisasContext *ctx)
9275 if (unlikely(!ctx->spe_enabled)) {
9276 gen_exception(ctx, POWERPC_EXCP_SPEU);
9277 return;
9279 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], (target_long)~0x80000000LL);
9281 static inline void gen_efsnabs(DisasContext *ctx)
9283 if (unlikely(!ctx->spe_enabled)) {
9284 gen_exception(ctx, POWERPC_EXCP_SPEU);
9285 return;
9287 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
9289 static inline void gen_efsneg(DisasContext *ctx)
9291 if (unlikely(!ctx->spe_enabled)) {
9292 gen_exception(ctx, POWERPC_EXCP_SPEU);
9293 return;
9295 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
9298 /* Conversion */
9299 GEN_SPEFPUOP_CONV_32_32(efscfui);
9300 GEN_SPEFPUOP_CONV_32_32(efscfsi);
9301 GEN_SPEFPUOP_CONV_32_32(efscfuf);
9302 GEN_SPEFPUOP_CONV_32_32(efscfsf);
9303 GEN_SPEFPUOP_CONV_32_32(efsctui);
9304 GEN_SPEFPUOP_CONV_32_32(efsctsi);
9305 GEN_SPEFPUOP_CONV_32_32(efsctuf);
9306 GEN_SPEFPUOP_CONV_32_32(efsctsf);
9307 GEN_SPEFPUOP_CONV_32_32(efsctuiz);
9308 GEN_SPEFPUOP_CONV_32_32(efsctsiz);
9309 GEN_SPEFPUOP_CONV_32_64(efscfd);
9311 /* Comparison */
9312 GEN_SPEFPUOP_COMP_32(efscmpgt);
9313 GEN_SPEFPUOP_COMP_32(efscmplt);
9314 GEN_SPEFPUOP_COMP_32(efscmpeq);
9315 GEN_SPEFPUOP_COMP_32(efststgt);
9316 GEN_SPEFPUOP_COMP_32(efststlt);
9317 GEN_SPEFPUOP_COMP_32(efststeq);
9319 /* Opcodes definitions */
9320 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
9321 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
9322 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
9323 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
9324 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
9325 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE); //
9326 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
9327 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
9328 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
9329 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
9330 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
9331 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
9332 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
9333 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
9335 /* Double precision floating-point operations */
9336 /* Arithmetic */
9337 GEN_SPEFPUOP_ARITH2_64_64(efdadd);
9338 GEN_SPEFPUOP_ARITH2_64_64(efdsub);
9339 GEN_SPEFPUOP_ARITH2_64_64(efdmul);
9340 GEN_SPEFPUOP_ARITH2_64_64(efddiv);
9341 static inline void gen_efdabs(DisasContext *ctx)
9343 if (unlikely(!ctx->spe_enabled)) {
9344 gen_exception(ctx, POWERPC_EXCP_SPEU);
9345 return;
9347 #if defined(TARGET_PPC64)
9348 tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], ~0x8000000000000000LL);
9349 #else
9350 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
9351 tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], ~0x80000000);
9352 #endif
9354 static inline void gen_efdnabs(DisasContext *ctx)
9356 if (unlikely(!ctx->spe_enabled)) {
9357 gen_exception(ctx, POWERPC_EXCP_SPEU);
9358 return;
9360 #if defined(TARGET_PPC64)
9361 tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
9362 #else
9363 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
9364 tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
9365 #endif
9367 static inline void gen_efdneg(DisasContext *ctx)
9369 if (unlikely(!ctx->spe_enabled)) {
9370 gen_exception(ctx, POWERPC_EXCP_SPEU);
9371 return;
9373 #if defined(TARGET_PPC64)
9374 tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x8000000000000000LL);
9375 #else
9376 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
9377 tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], 0x80000000);
9378 #endif
9381 /* Conversion */
9382 GEN_SPEFPUOP_CONV_64_32(efdcfui);
9383 GEN_SPEFPUOP_CONV_64_32(efdcfsi);
9384 GEN_SPEFPUOP_CONV_64_32(efdcfuf);
9385 GEN_SPEFPUOP_CONV_64_32(efdcfsf);
9386 GEN_SPEFPUOP_CONV_32_64(efdctui);
9387 GEN_SPEFPUOP_CONV_32_64(efdctsi);
9388 GEN_SPEFPUOP_CONV_32_64(efdctuf);
9389 GEN_SPEFPUOP_CONV_32_64(efdctsf);
9390 GEN_SPEFPUOP_CONV_32_64(efdctuiz);
9391 GEN_SPEFPUOP_CONV_32_64(efdctsiz);
9392 GEN_SPEFPUOP_CONV_64_32(efdcfs);
9393 GEN_SPEFPUOP_CONV_64_64(efdcfuid);
9394 GEN_SPEFPUOP_CONV_64_64(efdcfsid);
9395 GEN_SPEFPUOP_CONV_64_64(efdctuidz);
9396 GEN_SPEFPUOP_CONV_64_64(efdctsidz);
9398 /* Comparison */
9399 GEN_SPEFPUOP_COMP_64(efdcmpgt);
9400 GEN_SPEFPUOP_COMP_64(efdcmplt);
9401 GEN_SPEFPUOP_COMP_64(efdcmpeq);
9402 GEN_SPEFPUOP_COMP_64(efdtstgt);
9403 GEN_SPEFPUOP_COMP_64(efdtstlt);
9404 GEN_SPEFPUOP_COMP_64(efdtsteq);
9406 /* Opcodes definitions */
9407 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
9408 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
9409 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE); //
9410 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
9411 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
9412 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
9413 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
9414 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE); //
9415 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
9416 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
9417 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
9418 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
9419 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
9420 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
9421 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
9422 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
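/* Opcode table: GEN_HANDLER(name, opc1, opc2, opc3, inval, type) registers
 * gen_name for the given primary/extended opcodes; bits set in inval must be
 * zero in the instruction or it is treated as invalid.  GEN_HANDLER_E takes
 * an additional PPC2_* flag for newer ISA levels. */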
9424 static opcode_t opcodes[] = {
9425 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
9426 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
9427 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
9428 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
9429 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
9430 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
9431 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
9432 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9433 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9434 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9435 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9436 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
9437 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
9438 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
9439 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
9440 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9441 #if defined(TARGET_PPC64)
9442 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
9443 #endif
9444 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
9445 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
9446 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9447 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9448 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9449 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
9450 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
9451 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
9452 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9453 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9454 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9455 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9456 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
9457 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
9458 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
9459 #if defined(TARGET_PPC64)
9460 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
9461 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
9462 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
9463 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
9464 #endif
9465 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9466 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9467 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9468 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
9469 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
9470 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
9471 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
9472 #if defined(TARGET_PPC64)
9473 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
9474 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
9475 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
9476 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
9477 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
9478 #endif
9479 GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
9480 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
9481 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
9482 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
9483 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
9484 GEN_HANDLER(fabs, 0x3F, 0x08, 0x08, 0x001F0000, PPC_FLOAT),
9485 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
9486 GEN_HANDLER(fnabs, 0x3F, 0x08, 0x04, 0x001F0000, PPC_FLOAT),
9487 GEN_HANDLER(fneg, 0x3F, 0x08, 0x01, 0x001F0000, PPC_FLOAT),
9488 GEN_HANDLER_E(fcpsgn, 0x3F, 0x08, 0x00, 0x00000000, PPC_NONE, PPC2_ISA205),
9489 GEN_HANDLER_E(fmrgew, 0x3F, 0x06, 0x1E, 0x00000001, PPC_NONE, PPC2_VSX207),
9490 GEN_HANDLER_E(fmrgow, 0x3F, 0x06, 0x1A, 0x00000001, PPC_NONE, PPC2_VSX207),
9491 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
9492 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
9493 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
9494 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
9495 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00000000, PPC_FLOAT),
9496 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006e0800, PPC_FLOAT),
9497 #if defined(TARGET_PPC64)
9498 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
9499 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
9500 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
9501 #endif
9502 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9503 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
9504 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
9505 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
9506 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
9507 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
9508 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
9509 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
9510 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
9511 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
9512 #if defined(TARGET_PPC64)
9513 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
9514 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
9515 #endif
9516 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
9517 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
9518 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
9519 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
9520 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
9521 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
9522 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
9523 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
9524 #if defined(TARGET_PPC64)
9525 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
9526 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
9527 #endif
9528 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
9529 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
9530 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
9531 #if defined(TARGET_PPC64)
9532 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
9533 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
9534 #endif
9535 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
9536 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
9537 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
9538 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
9539 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
9540 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
9541 #if defined(TARGET_PPC64)
9542 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
9543 #endif
9544 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
9545 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
9546 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
9547 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
9548 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
9549 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
9550 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
9551 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
9552 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
9553 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
9554 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
9555 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
9556 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
9557 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
9558 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
9559 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
9560 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
9561 #if defined(TARGET_PPC64)
9562 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
9563 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
9564 PPC_SEGMENT_64B),
9565 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
9566 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
9567 PPC_SEGMENT_64B),
9568 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
9569 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
9570 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
9571 #endif
9572 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
9573 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
9574 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
9575 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
9576 #if defined(TARGET_PPC64)
9577 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
9578 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
9579 #endif
9580 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
9581 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
9582 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
9583 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
9584 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
9585 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
9586 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
9587 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
9588 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
9589 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
9590 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
9591 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
9592 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
9593 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
9594 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
9595 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
9596 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
9597 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
9598 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
9599 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
9600 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
9601 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
9602 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
9603 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
9604 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
9605 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
9606 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
9607 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
9608 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
9609 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
9610 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
9611 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
9612 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
9613 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
9614 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
9615 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
9616 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
9617 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
9618 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
9619 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
9620 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
9621 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
9622 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
9623 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
9624 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
9625 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
9626 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
9627 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
9628 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
9629 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
9630 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
9631 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
9632 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
9633 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
9634 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
9635 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
9636 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
9637 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
9638 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
9639 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
9640 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
9641 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
9642 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
9643 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
9644 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
9645 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
9646 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
9647 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
9648 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
9649 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
9650 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
9651 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
9652 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
9653 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
9654 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
9655 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
9656 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
9657 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
9658 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
9659 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
9660 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
9661 PPC_NONE, PPC2_BOOKE206),
9662 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
9663 PPC_NONE, PPC2_BOOKE206),
9664 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
9665 PPC_NONE, PPC2_BOOKE206),
9666 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
9667 PPC_NONE, PPC2_BOOKE206),
9668 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
9669 PPC_NONE, PPC2_BOOKE206),
9670 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
9671 PPC_NONE, PPC2_PRCNTL),
9672 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
9673 PPC_NONE, PPC2_PRCNTL),
9674 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
9675 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
9676 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
9677 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
9678 PPC_BOOKE, PPC2_BOOKE206),
9679 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
9680 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
9681 PPC_BOOKE, PPC2_BOOKE206),
9682 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
9683 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
9684 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
9685 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
9686 GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
9687 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
9688 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
9689 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
9690 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
9691 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
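/*
 * From here on, the helper macros defined earlier in the file are
 * redefined so that re-expanding the same argument lists emits opcode
 * table entries for the gen_* handlers generated above, rather than
 * generating the handlers themselves.
 */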
9693 #undef GEN_INT_ARITH_ADD
9694 #undef GEN_INT_ARITH_ADD_CONST
9695 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
9696 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
9697 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
9698 add_ca, compute_ca, compute_ov) \
9699 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
9700 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
9701 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
9702 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
9703 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
9704 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
9705 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
9706 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
9707 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
9708 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
9709 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
9711 #undef GEN_INT_ARITH_DIVW
9712 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
9713 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
9714 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
9715 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
9716 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
9717 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
9719 #if defined(TARGET_PPC64)
9720 #undef GEN_INT_ARITH_DIVD
9721 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
9722 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
9723 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
9724 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
9725 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
9726 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
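/* ISA 2.06 divide doubleword extended unsigned: divdeu and its
 * overflow-recording form divdeuo. */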
9728 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
9729 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
9731 #undef GEN_INT_ARITH_MUL_HELPER
9732 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
9733 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
9734 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
9735 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
9736 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
9737 #endif
9739 #undef GEN_INT_ARITH_SUBF
9740 #undef GEN_INT_ARITH_SUBF_CONST
9741 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
9742 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
9743 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
9744 add_ca, compute_ca, compute_ov) \
9745 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
9746 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
9747 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
9748 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
9749 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
9750 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
9751 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
9752 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
9753 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
9754 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
9755 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
9757 #undef GEN_LOGICAL1
9758 #undef GEN_LOGICAL2
9759 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
9760 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
9761 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
9762 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
9763 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
9764 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
9765 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
9766 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
9767 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
9768 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
9769 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
9770 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
9771 #if defined(TARGET_PPC64)
9772 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
9773 #endif
9775 #if defined(TARGET_PPC64)
9776 #undef GEN_PPC64_R2
9777 #undef GEN_PPC64_R4
9778 #define GEN_PPC64_R2(name, opc1, opc2) \
9779 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
9780 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
9781 PPC_64B)
9782 #define GEN_PPC64_R4(name, opc1, opc2) \
9783 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
9784 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
9785 PPC_64B), \
9786 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
9787 PPC_64B), \
9788 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
9789 PPC_64B)
9790 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
9791 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
9792 GEN_PPC64_R4(rldic, 0x1E, 0x04),
9793 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
9794 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
9795 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
9796 #endif
9798 #undef _GEN_FLOAT_ACB
9799 #undef GEN_FLOAT_ACB
9800 #undef _GEN_FLOAT_AB
9801 #undef GEN_FLOAT_AB
9802 #undef _GEN_FLOAT_AC
9803 #undef GEN_FLOAT_AC
9804 #undef GEN_FLOAT_B
9805 #undef GEN_FLOAT_BS
9806 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
9807 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
9808 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
9809 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
9810 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
9811 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
9812 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
9813 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
9814 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
9815 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
9816 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
9817 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
9818 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
9819 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
9820 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
9821 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
9822 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
9823 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
9824 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
9826 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
9827 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
9828 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
9829 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
9830 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
9831 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
9832 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
9833 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
9834 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
9835 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
9836 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
9837 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
9838 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
9839 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
9840 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
9841 #if defined(TARGET_PPC64)
9842 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
9843 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
9844 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
9845 #endif
9846 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
9847 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
9848 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
9849 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
9851 #undef GEN_LD
9852 #undef GEN_LDU
9853 #undef GEN_LDUX
9854 #undef GEN_LDX_E
9855 #undef GEN_LDS
9856 #define GEN_LD(name, ldop, opc, type) \
9857 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9858 #define GEN_LDU(name, ldop, opc, type) \
9859 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
9860 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
9861 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
9862 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2) \
9863 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
9864 #define GEN_LDS(name, ldop, op, type) \
9865 GEN_LD(name, ldop, op | 0x20, type) \
9866 GEN_LDU(name, ldop, op | 0x21, type) \
9867 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
9868 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
9870 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
9871 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
9872 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
9873 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
9874 #if defined(TARGET_PPC64)
9875 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
9876 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
9877 GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
9878 GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
9879 GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX)
9880 #endif
9881 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
9882 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
9884 #undef GEN_ST
9885 #undef GEN_STU
9886 #undef GEN_STUX
9887 #undef GEN_STX_E
9888 #undef GEN_STS
9889 #define GEN_ST(name, stop, opc, type) \
9890 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9891 #define GEN_STU(name, stop, opc, type) \
9892 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
9893 #define GEN_STUX(name, stop, opc2, opc3, type) \
9894 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
9895 #define GEN_STX_E(name, stop, opc2, opc3, type, type2) \
9896 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
9897 #define GEN_STS(name, stop, op, type) \
9898 GEN_ST(name, stop, op | 0x20, type) \
9899 GEN_STU(name, stop, op | 0x21, type) \
9900 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
9901 GEN_STX(name, stop, 0x17, op | 0x00, type)
9903 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
9904 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
9905 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
9906 #if defined(TARGET_PPC64)
9907 GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
9908 GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
9909 GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX)
9910 #endif
9911 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
9912 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
9914 #undef GEN_LDF
9915 #undef GEN_LDUF
9916 #undef GEN_LDUXF
9917 #undef GEN_LDXF
9918 #undef GEN_LDFS
9919 #define GEN_LDF(name, ldop, opc, type) \
9920 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9921 #define GEN_LDUF(name, ldop, opc, type) \
9922 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
9923 #define GEN_LDUXF(name, ldop, opc, type) \
9924 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
9925 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
9926 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
9927 #define GEN_LDFS(name, ldop, op, type) \
9928 GEN_LDF(name, ldop, op | 0x20, type) \
9929 GEN_LDUF(name, ldop, op | 0x21, type) \
9930 GEN_LDUXF(name, ldop, op | 0x01, type) \
9931 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
9933 GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
9934 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
9935 GEN_HANDLER_E(lfiwax, 0x1f, 0x17, 0x1a, 0x00000001, PPC_NONE, PPC2_ISA205),
9936 GEN_HANDLER_E(lfdp, 0x39, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
9937 GEN_HANDLER_E(lfdpx, 0x1F, 0x17, 0x18, 0x00200001, PPC_NONE, PPC2_ISA205),
9939 #undef GEN_STF
9940 #undef GEN_STUF
9941 #undef GEN_STUXF
9942 #undef GEN_STXF
9943 #undef GEN_STFS
9944 #define GEN_STF(name, stop, opc, type) \
9945 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
9946 #define GEN_STUF(name, stop, opc, type) \
9947 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
9948 #define GEN_STUXF(name, stop, opc, type) \
9949 GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
9950 #define GEN_STXF(name, stop, opc2, opc3, type) \
9951 GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
9952 #define GEN_STFS(name, stop, op, type) \
9953 GEN_STF(name, stop, op | 0x20, type) \
9954 GEN_STUF(name, stop, op | 0x21, type) \
9955 GEN_STUXF(name, stop, op | 0x01, type) \
9956 GEN_STXF(name, stop, 0x17, op | 0x00, type)
9958 GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
9959 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
9960 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
9961 GEN_HANDLER_E(stfdp, 0x3D, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
9962 GEN_HANDLER_E(stfdpx, 0x1F, 0x17, 0x1C, 0x00200001, PPC_NONE, PPC2_ISA205),
9964 #undef GEN_CRLOGIC
9965 #define GEN_CRLOGIC(name, tcg_op, opc) \
9966 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
9967 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
9968 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
9969 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
9970 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
9971 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
9972 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
9973 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
9974 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
9976 #undef GEN_MAC_HANDLER
9977 #define GEN_MAC_HANDLER(name, opc2, opc3) \
9978 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
9979 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
9980 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
9981 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
9982 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
9983 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
9984 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
9985 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
9986 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
9987 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
9988 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
9989 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
9990 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
9991 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
9992 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
9993 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
9994 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
9995 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
9996 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
9997 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
9998 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
9999 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
10000 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
10001 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
10002 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
10003 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
10004 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
10005 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
10006 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
10007 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
10008 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
10009 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
10010 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
10011 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
10012 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
10013 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
10014 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
10015 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
10016 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
10017 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
10018 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
10019 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
10020 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
10022 #undef GEN_VR_LDX
10023 #undef GEN_VR_STX
10024 #undef GEN_VR_LVE
10025 #undef GEN_VR_STVE
10026 #define GEN_VR_LDX(name, opc2, opc3) \
10027 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
10028 #define GEN_VR_STX(name, opc2, opc3) \
10029 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
10030 #define GEN_VR_LVE(name, opc2, opc3) \
10031 GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
10032 #define GEN_VR_STVE(name, opc2, opc3) \
10033 GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
10034 GEN_VR_LDX(lvx, 0x07, 0x03),
10035 GEN_VR_LDX(lvxl, 0x07, 0x0B),
10036 GEN_VR_LVE(bx, 0x07, 0x00),
10037 GEN_VR_LVE(hx, 0x07, 0x01),
10038 GEN_VR_LVE(wx, 0x07, 0x02),
10039 GEN_VR_STX(svx, 0x07, 0x07),
10040 GEN_VR_STX(svxl, 0x07, 0x0F),
10041 GEN_VR_STVE(bx, 0x07, 0x04),
10042 GEN_VR_STVE(hx, 0x07, 0x05),
10043 GEN_VR_STVE(wx, 0x07, 0x06),
10045 #undef GEN_VX_LOGICAL
10046 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
10047 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
10048 GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
10049 GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
10050 GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
10051 GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
10052 GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
10054 #undef GEN_VXFORM
10055 #define GEN_VXFORM(name, opc2, opc3) \
10056 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
10057 GEN_VXFORM(vaddubm, 0, 0),
10058 GEN_VXFORM(vadduhm, 0, 1),
10059 GEN_VXFORM(vadduwm, 0, 2),
10060 GEN_VXFORM(vsububm, 0, 16),
10061 GEN_VXFORM(vsubuhm, 0, 17),
10062 GEN_VXFORM(vsubuwm, 0, 18),
10063 GEN_VXFORM(vmaxub, 1, 0),
10064 GEN_VXFORM(vmaxuh, 1, 1),
10065 GEN_VXFORM(vmaxuw, 1, 2),
10066 GEN_VXFORM(vmaxsb, 1, 4),
10067 GEN_VXFORM(vmaxsh, 1, 5),
10068 GEN_VXFORM(vmaxsw, 1, 6),
10069 GEN_VXFORM(vminub, 1, 8),
10070 GEN_VXFORM(vminuh, 1, 9),
10071 GEN_VXFORM(vminuw, 1, 10),
10072 GEN_VXFORM(vminsb, 1, 12),
10073 GEN_VXFORM(vminsh, 1, 13),
10074 GEN_VXFORM(vminsw, 1, 14),
10075 GEN_VXFORM(vavgub, 1, 16),
10076 GEN_VXFORM(vavguh, 1, 17),
10077 GEN_VXFORM(vavguw, 1, 18),
10078 GEN_VXFORM(vavgsb, 1, 20),
10079 GEN_VXFORM(vavgsh, 1, 21),
10080 GEN_VXFORM(vavgsw, 1, 22),
10081 GEN_VXFORM(vmrghb, 6, 0),
10082 GEN_VXFORM(vmrghh, 6, 1),
10083 GEN_VXFORM(vmrghw, 6, 2),
10084 GEN_VXFORM(vmrglb, 6, 4),
10085 GEN_VXFORM(vmrglh, 6, 5),
10086 GEN_VXFORM(vmrglw, 6, 6),
10087 GEN_VXFORM(vmuloub, 4, 0),
10088 GEN_VXFORM(vmulouh, 4, 1),
10089 GEN_VXFORM(vmulosb, 4, 4),
10090 GEN_VXFORM(vmulosh, 4, 5),
10091 GEN_VXFORM(vmuleub, 4, 8),
10092 GEN_VXFORM(vmuleuh, 4, 9),
10093 GEN_VXFORM(vmulesb, 4, 12),
10094 GEN_VXFORM(vmulesh, 4, 13),
10095 GEN_VXFORM(vslb, 2, 4),
10096 GEN_VXFORM(vslh, 2, 5),
10097 GEN_VXFORM(vslw, 2, 6),
10098 GEN_VXFORM(vsrb, 2, 8),
10099 GEN_VXFORM(vsrh, 2, 9),
10100 GEN_VXFORM(vsrw, 2, 10),
10101 GEN_VXFORM(vsrab, 2, 12),
10102 GEN_VXFORM(vsrah, 2, 13),
10103 GEN_VXFORM(vsraw, 2, 14),
10104 GEN_VXFORM(vslo, 6, 16),
10105 GEN_VXFORM(vsro, 6, 17),
10106 GEN_VXFORM(vaddcuw, 0, 6),
10107 GEN_VXFORM(vsubcuw, 0, 22),
10108 GEN_VXFORM(vaddubs, 0, 8),
10109 GEN_VXFORM(vadduhs, 0, 9),
10110 GEN_VXFORM(vadduws, 0, 10),
10111 GEN_VXFORM(vaddsbs, 0, 12),
10112 GEN_VXFORM(vaddshs, 0, 13),
10113 GEN_VXFORM(vaddsws, 0, 14),
10114 GEN_VXFORM(vsububs, 0, 24),
10115 GEN_VXFORM(vsubuhs, 0, 25),
10116 GEN_VXFORM(vsubuws, 0, 26),
10117 GEN_VXFORM(vsubsbs, 0, 28),
10118 GEN_VXFORM(vsubshs, 0, 29),
10119 GEN_VXFORM(vsubsws, 0, 30),
10120 GEN_VXFORM(vrlb, 2, 0),
10121 GEN_VXFORM(vrlh, 2, 1),
10122 GEN_VXFORM(vrlw, 2, 2),
10123 GEN_VXFORM(vsl, 2, 7),
10124 GEN_VXFORM(vsr, 2, 11),
10125 GEN_VXFORM(vpkuhum, 7, 0),
10126 GEN_VXFORM(vpkuwum, 7, 1),
10127 GEN_VXFORM(vpkuhus, 7, 2),
10128 GEN_VXFORM(vpkuwus, 7, 3),
10129 GEN_VXFORM(vpkshus, 7, 4),
10130 GEN_VXFORM(vpkswus, 7, 5),
10131 GEN_VXFORM(vpkshss, 7, 6),
10132 GEN_VXFORM(vpkswss, 7, 7),
10133 GEN_VXFORM(vpkpx, 7, 12),
10134 GEN_VXFORM(vsum4ubs, 4, 24),
10135 GEN_VXFORM(vsum4sbs, 4, 28),
10136 GEN_VXFORM(vsum4shs, 4, 25),
10137 GEN_VXFORM(vsum2sws, 4, 26),
10138 GEN_VXFORM(vsumsws, 4, 30),
10139 GEN_VXFORM(vaddfp, 5, 0),
10140 GEN_VXFORM(vsubfp, 5, 1),
10141 GEN_VXFORM(vmaxfp, 5, 16),
10142 GEN_VXFORM(vminfp, 5, 17),
10144 #undef GEN_VXRFORM1
10145 #undef GEN_VXRFORM
10146 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
10147 GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
10148 #define GEN_VXRFORM(name, opc2, opc3) \
10149 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
10150 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
10151 GEN_VXRFORM(vcmpequb, 3, 0)
10152 GEN_VXRFORM(vcmpequh, 3, 1)
10153 GEN_VXRFORM(vcmpequw, 3, 2)
10154 GEN_VXRFORM(vcmpgtsb, 3, 12)
10155 GEN_VXRFORM(vcmpgtsh, 3, 13)
10156 GEN_VXRFORM(vcmpgtsw, 3, 14)
10157 GEN_VXRFORM(vcmpgtub, 3, 8)
10158 GEN_VXRFORM(vcmpgtuh, 3, 9)
10159 GEN_VXRFORM(vcmpgtuw, 3, 10)
10160 GEN_VXRFORM(vcmpeqfp, 3, 3)
10161 GEN_VXRFORM(vcmpgefp, 3, 7)
10162 GEN_VXRFORM(vcmpgtfp, 3, 11)
10163 GEN_VXRFORM(vcmpbfp, 3, 15)
10165 #undef GEN_VXFORM_SIMM
10166 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
10167 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
10168 GEN_VXFORM_SIMM(vspltisb, 6, 12),
10169 GEN_VXFORM_SIMM(vspltish, 6, 13),
10170 GEN_VXFORM_SIMM(vspltisw, 6, 14),
10172 #undef GEN_VXFORM_NOA
10173 #define GEN_VXFORM_NOA(name, opc2, opc3) \
10174 GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
10175 GEN_VXFORM_NOA(vupkhsb, 7, 8),
10176 GEN_VXFORM_NOA(vupkhsh, 7, 9),
10177 GEN_VXFORM_NOA(vupklsb, 7, 10),
10178 GEN_VXFORM_NOA(vupklsh, 7, 11),
10179 GEN_VXFORM_NOA(vupkhpx, 7, 13),
10180 GEN_VXFORM_NOA(vupklpx, 7, 15),
10181 GEN_VXFORM_NOA(vrefp, 5, 4),
10182 GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
10183 GEN_VXFORM_NOA(vexptefp, 5, 6),
10184 GEN_VXFORM_NOA(vlogefp, 5, 7),
10185 GEN_VXFORM_NOA(vrfim, 5, 8),
10186 GEN_VXFORM_NOA(vrfin, 5, 9),
10187 GEN_VXFORM_NOA(vrfip, 5, 10),
10188 GEN_VXFORM_NOA(vrfiz, 5, 11),
10190 #undef GEN_VXFORM_UIMM
10191 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
10192 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
10193 GEN_VXFORM_UIMM(vspltb, 6, 8),
10194 GEN_VXFORM_UIMM(vsplth, 6, 9),
10195 GEN_VXFORM_UIMM(vspltw, 6, 10),
10196 GEN_VXFORM_UIMM(vcfux, 5, 12),
10197 GEN_VXFORM_UIMM(vcfsx, 5, 13),
10198 GEN_VXFORM_UIMM(vctuxs, 5, 14),
10199 GEN_VXFORM_UIMM(vctsxs, 5, 15),
10201 #undef GEN_VAFORM_PAIRED
10202 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
10203 GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
10204 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
10205 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
10206 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
10207 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
10208 GEN_VAFORM_PAIRED(vsel, vperm, 21),
10209 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
10211 GEN_HANDLER_E(lxsdx, 0x1F, 0x0C, 0x12, 0, PPC_NONE, PPC2_VSX),
10212 GEN_HANDLER_E(lxsiwax, 0x1F, 0x0C, 0x02, 0, PPC_NONE, PPC2_VSX207),
10213 GEN_HANDLER_E(lxsiwzx, 0x1F, 0x0C, 0x00, 0, PPC_NONE, PPC2_VSX207),
10214 GEN_HANDLER_E(lxsspx, 0x1F, 0x0C, 0x10, 0, PPC_NONE, PPC2_VSX207),
10215 GEN_HANDLER_E(lxvd2x, 0x1F, 0x0C, 0x1A, 0, PPC_NONE, PPC2_VSX),
10216 GEN_HANDLER_E(lxvdsx, 0x1F, 0x0C, 0x0A, 0, PPC_NONE, PPC2_VSX),
10217 GEN_HANDLER_E(lxvw4x, 0x1F, 0x0C, 0x18, 0, PPC_NONE, PPC2_VSX),
10219 GEN_HANDLER_E(stxsdx, 0x1F, 0xC, 0x16, 0, PPC_NONE, PPC2_VSX),
10220 GEN_HANDLER_E(stxsiwx, 0x1F, 0xC, 0x04, 0, PPC_NONE, PPC2_VSX207),
10221 GEN_HANDLER_E(stxsspx, 0x1F, 0xC, 0x14, 0, PPC_NONE, PPC2_VSX207),
10222 GEN_HANDLER_E(stxvd2x, 0x1F, 0xC, 0x1E, 0, PPC_NONE, PPC2_VSX),
10223 GEN_HANDLER_E(stxvw4x, 0x1F, 0xC, 0x1C, 0, PPC_NONE, PPC2_VSX),
10225 GEN_HANDLER_E(mfvsrwz, 0x1F, 0x13, 0x03, 0x0000F800, PPC_NONE, PPC2_VSX207),
10226 GEN_HANDLER_E(mtvsrwa, 0x1F, 0x13, 0x06, 0x0000F800, PPC_NONE, PPC2_VSX207),
10227 GEN_HANDLER_E(mtvsrwz, 0x1F, 0x13, 0x07, 0x0000F800, PPC_NONE, PPC2_VSX207),
10228 #if defined(TARGET_PPC64)
10229 GEN_HANDLER_E(mfvsrd, 0x1F, 0x13, 0x01, 0x0000F800, PPC_NONE, PPC2_VSX207),
10230 GEN_HANDLER_E(mtvsrd, 0x1F, 0x13, 0x05, 0x0000F800, PPC_NONE, PPC2_VSX207),
10231 #endif
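/*
 * VSX note: the bits that extend the VSX register fields to 64 registers
 * fall inside the extended-opcode bits used for the table lookup here, so
 * each XX2/XX3 form is registered once per combination of those bits
 * (opc2 | 0..3, and opc3 variants for the Rc and DM fields).
 */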
10233 #undef GEN_XX2FORM
10234 #define GEN_XX2FORM(name, opc2, opc3, fl2) \
10235 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0, opc3, 0, PPC_NONE, fl2), \
10236 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 1, opc3, 0, PPC_NONE, fl2)
10238 #undef GEN_XX3FORM
10239 #define GEN_XX3FORM(name, opc2, opc3, fl2) \
10240 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0, opc3, 0, PPC_NONE, fl2), \
10241 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 1, opc3, 0, PPC_NONE, fl2), \
10242 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 2, opc3, 0, PPC_NONE, fl2), \
10243 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 3, opc3, 0, PPC_NONE, fl2)
10245 #undef GEN_XX3_RC_FORM
10246 #define GEN_XX3_RC_FORM(name, opc2, opc3, fl2) \
10247 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x00, opc3 | 0x00, 0, PPC_NONE, fl2), \
10248 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x01, opc3 | 0x00, 0, PPC_NONE, fl2), \
10249 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x02, opc3 | 0x00, 0, PPC_NONE, fl2), \
10250 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x03, opc3 | 0x00, 0, PPC_NONE, fl2), \
10251 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x00, opc3 | 0x10, 0, PPC_NONE, fl2), \
10252 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x01, opc3 | 0x10, 0, PPC_NONE, fl2), \
10253 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x02, opc3 | 0x10, 0, PPC_NONE, fl2), \
10254 GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x03, opc3 | 0x10, 0, PPC_NONE, fl2)
10256 #undef GEN_XX3FORM_DM
10257 #define GEN_XX3FORM_DM(name, opc2, opc3) \
10258 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
10259 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
10260 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
10261 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
10262 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
10263 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
10264 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
10265 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
10266 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
10267 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
10268 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
10269 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
10270 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x0C, 0, PPC_NONE, PPC2_VSX),\
10271 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x0C, 0, PPC_NONE, PPC2_VSX),\
10272 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x0C, 0, PPC_NONE, PPC2_VSX),\
10273 GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x0C, 0, PPC_NONE, PPC2_VSX)
10275 GEN_XX2FORM(xsabsdp, 0x12, 0x15, PPC2_VSX),
10276 GEN_XX2FORM(xsnabsdp, 0x12, 0x16, PPC2_VSX),
10277 GEN_XX2FORM(xsnegdp, 0x12, 0x17, PPC2_VSX),
10278 GEN_XX3FORM(xscpsgndp, 0x00, 0x16, PPC2_VSX),
10280 GEN_XX2FORM(xvabsdp, 0x12, 0x1D, PPC2_VSX),
10281 GEN_XX2FORM(xvnabsdp, 0x12, 0x1E, PPC2_VSX),
10282 GEN_XX2FORM(xvnegdp, 0x12, 0x1F, PPC2_VSX),
10283 GEN_XX3FORM(xvcpsgndp, 0x00, 0x1E, PPC2_VSX),
10284 GEN_XX2FORM(xvabssp, 0x12, 0x19, PPC2_VSX),
10285 GEN_XX2FORM(xvnabssp, 0x12, 0x1A, PPC2_VSX),
10286 GEN_XX2FORM(xvnegsp, 0x12, 0x1B, PPC2_VSX),
10287 GEN_XX3FORM(xvcpsgnsp, 0x00, 0x1A, PPC2_VSX),
10289 GEN_XX3FORM(xsadddp, 0x00, 0x04, PPC2_VSX),
10290 GEN_XX3FORM(xssubdp, 0x00, 0x05, PPC2_VSX),
10291 GEN_XX3FORM(xsmuldp, 0x00, 0x06, PPC2_VSX),
10292 GEN_XX3FORM(xsdivdp, 0x00, 0x07, PPC2_VSX),
10293 GEN_XX2FORM(xsredp, 0x14, 0x05, PPC2_VSX),
10294 GEN_XX2FORM(xssqrtdp, 0x16, 0x04, PPC2_VSX),
10295 GEN_XX2FORM(xsrsqrtedp, 0x14, 0x04, PPC2_VSX),
10296 GEN_XX3FORM(xstdivdp, 0x14, 0x07, PPC2_VSX),
10297 GEN_XX2FORM(xstsqrtdp, 0x14, 0x06, PPC2_VSX),
10298 GEN_XX3FORM(xsmaddadp, 0x04, 0x04, PPC2_VSX),
10299 GEN_XX3FORM(xsmaddmdp, 0x04, 0x05, PPC2_VSX),
10300 GEN_XX3FORM(xsmsubadp, 0x04, 0x06, PPC2_VSX),
10301 GEN_XX3FORM(xsmsubmdp, 0x04, 0x07, PPC2_VSX),
10302 GEN_XX3FORM(xsnmaddadp, 0x04, 0x14, PPC2_VSX),
10303 GEN_XX3FORM(xsnmaddmdp, 0x04, 0x15, PPC2_VSX),
10304 GEN_XX3FORM(xsnmsubadp, 0x04, 0x16, PPC2_VSX),
10305 GEN_XX3FORM(xsnmsubmdp, 0x04, 0x17, PPC2_VSX),
10306 GEN_XX2FORM(xscmpodp, 0x0C, 0x05, PPC2_VSX),
10307 GEN_XX2FORM(xscmpudp, 0x0C, 0x04, PPC2_VSX),
10308 GEN_XX3FORM(xsmaxdp, 0x00, 0x14, PPC2_VSX),
10309 GEN_XX3FORM(xsmindp, 0x00, 0x15, PPC2_VSX),
10310 GEN_XX2FORM(xscvdpsp, 0x12, 0x10, PPC2_VSX),
10311 GEN_XX2FORM(xscvdpspn, 0x16, 0x10, PPC2_VSX207),
10312 GEN_XX2FORM(xscvspdp, 0x12, 0x14, PPC2_VSX),
10313 GEN_XX2FORM(xscvspdpn, 0x16, 0x14, PPC2_VSX207),
10314 GEN_XX2FORM(xscvdpsxds, 0x10, 0x15, PPC2_VSX),
10315 GEN_XX2FORM(xscvdpsxws, 0x10, 0x05, PPC2_VSX),
10316 GEN_XX2FORM(xscvdpuxds, 0x10, 0x14, PPC2_VSX),
10317 GEN_XX2FORM(xscvdpuxws, 0x10, 0x04, PPC2_VSX),
10318 GEN_XX2FORM(xscvsxddp, 0x10, 0x17, PPC2_VSX),
10319 GEN_XX2FORM(xscvuxddp, 0x10, 0x16, PPC2_VSX),
10320 GEN_XX2FORM(xsrdpi, 0x12, 0x04, PPC2_VSX),
10321 GEN_XX2FORM(xsrdpic, 0x16, 0x06, PPC2_VSX),
10322 GEN_XX2FORM(xsrdpim, 0x12, 0x07, PPC2_VSX),
10323 GEN_XX2FORM(xsrdpip, 0x12, 0x06, PPC2_VSX),
10324 GEN_XX2FORM(xsrdpiz, 0x12, 0x05, PPC2_VSX),
10326 GEN_XX3FORM(xsaddsp, 0x00, 0x00, PPC2_VSX207),
10327 GEN_XX3FORM(xssubsp, 0x00, 0x01, PPC2_VSX207),
10328 GEN_XX3FORM(xsmulsp, 0x00, 0x02, PPC2_VSX207),
10329 GEN_XX3FORM(xsdivsp, 0x00, 0x03, PPC2_VSX207),
10330 GEN_XX2FORM(xsresp, 0x14, 0x01, PPC2_VSX207),
10331 GEN_XX2FORM(xsrsp, 0x12, 0x11, PPC2_VSX207),
10332 GEN_XX2FORM(xssqrtsp, 0x16, 0x00, PPC2_VSX207),
10333 GEN_XX2FORM(xsrsqrtesp, 0x14, 0x00, PPC2_VSX207),
10334 GEN_XX3FORM(xsmaddasp, 0x04, 0x00, PPC2_VSX207),
10335 GEN_XX3FORM(xsmaddmsp, 0x04, 0x01, PPC2_VSX207),
10336 GEN_XX3FORM(xsmsubasp, 0x04, 0x02, PPC2_VSX207),
10337 GEN_XX3FORM(xsmsubmsp, 0x04, 0x03, PPC2_VSX207),
10338 GEN_XX3FORM(xsnmaddasp, 0x04, 0x10, PPC2_VSX207),
10339 GEN_XX3FORM(xsnmaddmsp, 0x04, 0x11, PPC2_VSX207),
10340 GEN_XX3FORM(xsnmsubasp, 0x04, 0x12, PPC2_VSX207),
10341 GEN_XX3FORM(xsnmsubmsp, 0x04, 0x13, PPC2_VSX207),
10342 GEN_XX2FORM(xscvsxdsp, 0x10, 0x13, PPC2_VSX207),
10343 GEN_XX2FORM(xscvuxdsp, 0x10, 0x12, PPC2_VSX207),
10345 GEN_XX3FORM(xvadddp, 0x00, 0x0C, PPC2_VSX),
10346 GEN_XX3FORM(xvsubdp, 0x00, 0x0D, PPC2_VSX),
10347 GEN_XX3FORM(xvmuldp, 0x00, 0x0E, PPC2_VSX),
10348 GEN_XX3FORM(xvdivdp, 0x00, 0x0F, PPC2_VSX),
10349 GEN_XX2FORM(xvredp, 0x14, 0x0D, PPC2_VSX),
10350 GEN_XX2FORM(xvsqrtdp, 0x16, 0x0C, PPC2_VSX),
10351 GEN_XX2FORM(xvrsqrtedp, 0x14, 0x0C, PPC2_VSX),
10352 GEN_XX3FORM(xvtdivdp, 0x14, 0x0F, PPC2_VSX),
10353 GEN_XX2FORM(xvtsqrtdp, 0x14, 0x0E, PPC2_VSX),
10354 GEN_XX3FORM(xvmaddadp, 0x04, 0x0C, PPC2_VSX),
10355 GEN_XX3FORM(xvmaddmdp, 0x04, 0x0D, PPC2_VSX),
10356 GEN_XX3FORM(xvmsubadp, 0x04, 0x0E, PPC2_VSX),
10357 GEN_XX3FORM(xvmsubmdp, 0x04, 0x0F, PPC2_VSX),
10358 GEN_XX3FORM(xvnmaddadp, 0x04, 0x1C, PPC2_VSX),
10359 GEN_XX3FORM(xvnmaddmdp, 0x04, 0x1D, PPC2_VSX),
10360 GEN_XX3FORM(xvnmsubadp, 0x04, 0x1E, PPC2_VSX),
10361 GEN_XX3FORM(xvnmsubmdp, 0x04, 0x1F, PPC2_VSX),
10362 GEN_XX3FORM(xvmaxdp, 0x00, 0x1C, PPC2_VSX),
10363 GEN_XX3FORM(xvmindp, 0x00, 0x1D, PPC2_VSX),
10364 GEN_XX3_RC_FORM(xvcmpeqdp, 0x0C, 0x0C, PPC2_VSX),
10365 GEN_XX3_RC_FORM(xvcmpgtdp, 0x0C, 0x0D, PPC2_VSX),
10366 GEN_XX3_RC_FORM(xvcmpgedp, 0x0C, 0x0E, PPC2_VSX),
10367 GEN_XX2FORM(xvcvdpsp, 0x12, 0x18, PPC2_VSX),
10368 GEN_XX2FORM(xvcvdpsxds, 0x10, 0x1D, PPC2_VSX),
10369 GEN_XX2FORM(xvcvdpsxws, 0x10, 0x0D, PPC2_VSX),
10370 GEN_XX2FORM(xvcvdpuxds, 0x10, 0x1C, PPC2_VSX),
10371 GEN_XX2FORM(xvcvdpuxws, 0x10, 0x0C, PPC2_VSX),
10372 GEN_XX2FORM(xvcvsxddp, 0x10, 0x1F, PPC2_VSX),
10373 GEN_XX2FORM(xvcvuxddp, 0x10, 0x1E, PPC2_VSX),
10374 GEN_XX2FORM(xvcvsxwdp, 0x10, 0x0F, PPC2_VSX),
10375 GEN_XX2FORM(xvcvuxwdp, 0x10, 0x0E, PPC2_VSX),
10376 GEN_XX2FORM(xvrdpi, 0x12, 0x0C, PPC2_VSX),
10377 GEN_XX2FORM(xvrdpic, 0x16, 0x0E, PPC2_VSX),
10378 GEN_XX2FORM(xvrdpim, 0x12, 0x0F, PPC2_VSX),
10379 GEN_XX2FORM(xvrdpip, 0x12, 0x0E, PPC2_VSX),
10380 GEN_XX2FORM(xvrdpiz, 0x12, 0x0D, PPC2_VSX),
10382 GEN_XX3FORM(xvaddsp, 0x00, 0x08, PPC2_VSX),
10383 GEN_XX3FORM(xvsubsp, 0x00, 0x09, PPC2_VSX),
10384 GEN_XX3FORM(xvmulsp, 0x00, 0x0A, PPC2_VSX),
10385 GEN_XX3FORM(xvdivsp, 0x00, 0x0B, PPC2_VSX),
10386 GEN_XX2FORM(xvresp, 0x14, 0x09, PPC2_VSX),
10387 GEN_XX2FORM(xvsqrtsp, 0x16, 0x08, PPC2_VSX),
10388 GEN_XX2FORM(xvrsqrtesp, 0x14, 0x08, PPC2_VSX),
10389 GEN_XX3FORM(xvtdivsp, 0x14, 0x0B, PPC2_VSX),
10390 GEN_XX2FORM(xvtsqrtsp, 0x14, 0x0A, PPC2_VSX),
10391 GEN_XX3FORM(xvmaddasp, 0x04, 0x08, PPC2_VSX),
10392 GEN_XX3FORM(xvmaddmsp, 0x04, 0x09, PPC2_VSX),
10393 GEN_XX3FORM(xvmsubasp, 0x04, 0x0A, PPC2_VSX),
10394 GEN_XX3FORM(xvmsubmsp, 0x04, 0x0B, PPC2_VSX),
10395 GEN_XX3FORM(xvnmaddasp, 0x04, 0x18, PPC2_VSX),
10396 GEN_XX3FORM(xvnmaddmsp, 0x04, 0x19, PPC2_VSX),
10397 GEN_XX3FORM(xvnmsubasp, 0x04, 0x1A, PPC2_VSX),
10398 GEN_XX3FORM(xvnmsubmsp, 0x04, 0x1B, PPC2_VSX),
10399 GEN_XX3FORM(xvmaxsp, 0x00, 0x18, PPC2_VSX),
10400 GEN_XX3FORM(xvminsp, 0x00, 0x19, PPC2_VSX),
10401 GEN_XX3_RC_FORM(xvcmpeqsp, 0x0C, 0x08, PPC2_VSX),
10402 GEN_XX3_RC_FORM(xvcmpgtsp, 0x0C, 0x09, PPC2_VSX),
10403 GEN_XX3_RC_FORM(xvcmpgesp, 0x0C, 0x0A, PPC2_VSX),
10404 GEN_XX2FORM(xvcvspdp, 0x12, 0x1C, PPC2_VSX),
10405 GEN_XX2FORM(xvcvspsxds, 0x10, 0x19, PPC2_VSX),
10406 GEN_XX2FORM(xvcvspsxws, 0x10, 0x09, PPC2_VSX),
10407 GEN_XX2FORM(xvcvspuxds, 0x10, 0x18, PPC2_VSX),
10408 GEN_XX2FORM(xvcvspuxws, 0x10, 0x08, PPC2_VSX),
10409 GEN_XX2FORM(xvcvsxdsp, 0x10, 0x1B, PPC2_VSX),
10410 GEN_XX2FORM(xvcvuxdsp, 0x10, 0x1A, PPC2_VSX),
10411 GEN_XX2FORM(xvcvsxwsp, 0x10, 0x0B, PPC2_VSX),
10412 GEN_XX2FORM(xvcvuxwsp, 0x10, 0x0A, PPC2_VSX),
10413 GEN_XX2FORM(xvrspi, 0x12, 0x08, PPC2_VSX),
10414 GEN_XX2FORM(xvrspic, 0x16, 0x0A, PPC2_VSX),
10415 GEN_XX2FORM(xvrspim, 0x12, 0x0B, PPC2_VSX),
10416 GEN_XX2FORM(xvrspip, 0x12, 0x0A, PPC2_VSX),
10417 GEN_XX2FORM(xvrspiz, 0x12, 0x09, PPC2_VSX),
10419 #undef VSX_LOGICAL
10420 #define VSX_LOGICAL(name, opc2, opc3, fl2) \
10421 GEN_XX3FORM(name, opc2, opc3, fl2)
10423 VSX_LOGICAL(xxland, 0x8, 0x10, PPC2_VSX),
10424 VSX_LOGICAL(xxlandc, 0x8, 0x11, PPC2_VSX),
10425 VSX_LOGICAL(xxlor, 0x8, 0x12, PPC2_VSX),
10426 VSX_LOGICAL(xxlxor, 0x8, 0x13, PPC2_VSX),
10427 VSX_LOGICAL(xxlnor, 0x8, 0x14, PPC2_VSX),
10428 VSX_LOGICAL(xxleqv, 0x8, 0x17, PPC2_VSX207),
10429 VSX_LOGICAL(xxlnand, 0x8, 0x16, PPC2_VSX207),
10430 VSX_LOGICAL(xxlorc, 0x8, 0x15, PPC2_VSX207),
10431 GEN_XX3FORM(xxmrghw, 0x08, 0x02, PPC2_VSX),
10432 GEN_XX3FORM(xxmrglw, 0x08, 0x06, PPC2_VSX),
10433 GEN_XX2FORM(xxspltw, 0x08, 0x0A, PPC2_VSX),
10434 GEN_XX3FORM_DM(xxsldwi, 0x08, 0x00),
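/*
 * xxsel is an XX4-form instruction whose third source register field
 * overlaps the bits decoded here as opc2/opc3, so it is registered for
 * every opc2 value 0x18-0x1F and every opc3 value.
 */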
10436 #define GEN_XXSEL_ROW(opc3) \
10437 GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x18, opc3, 0, PPC_NONE, PPC2_VSX), \
10438 GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x19, opc3, 0, PPC_NONE, PPC2_VSX), \
10439 GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1A, opc3, 0, PPC_NONE, PPC2_VSX), \
10440 GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1B, opc3, 0, PPC_NONE, PPC2_VSX), \
10441 GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1C, opc3, 0, PPC_NONE, PPC2_VSX), \
10442 GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1D, opc3, 0, PPC_NONE, PPC2_VSX), \
10443 GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1E, opc3, 0, PPC_NONE, PPC2_VSX), \
10444 GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1F, opc3, 0, PPC_NONE, PPC2_VSX), \
10446 GEN_XXSEL_ROW(0x00)
10447 GEN_XXSEL_ROW(0x01)
10448 GEN_XXSEL_ROW(0x02)
10449 GEN_XXSEL_ROW(0x03)
10450 GEN_XXSEL_ROW(0x04)
10451 GEN_XXSEL_ROW(0x05)
10452 GEN_XXSEL_ROW(0x06)
10453 GEN_XXSEL_ROW(0x07)
10454 GEN_XXSEL_ROW(0x08)
10455 GEN_XXSEL_ROW(0x09)
10456 GEN_XXSEL_ROW(0x0A)
10457 GEN_XXSEL_ROW(0x0B)
10458 GEN_XXSEL_ROW(0x0C)
10459 GEN_XXSEL_ROW(0x0D)
10460 GEN_XXSEL_ROW(0x0E)
10461 GEN_XXSEL_ROW(0x0F)
10462 GEN_XXSEL_ROW(0x10)
10463 GEN_XXSEL_ROW(0x11)
10464 GEN_XXSEL_ROW(0x12)
10465 GEN_XXSEL_ROW(0x13)
10466 GEN_XXSEL_ROW(0x14)
10467 GEN_XXSEL_ROW(0x15)
10468 GEN_XXSEL_ROW(0x16)
10469 GEN_XXSEL_ROW(0x17)
10470 GEN_XXSEL_ROW(0x18)
10471 GEN_XXSEL_ROW(0x19)
10472 GEN_XXSEL_ROW(0x1A)
10473 GEN_XXSEL_ROW(0x1B)
10474 GEN_XXSEL_ROW(0x1C)
10475 GEN_XXSEL_ROW(0x1D)
10476 GEN_XXSEL_ROW(0x1E)
10477 GEN_XXSEL_ROW(0x1F)
10479 GEN_XX3FORM_DM(xxpermdi, 0x08, 0x01),
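/*
 * GEN_SPE is redefined here to emit a dual opcode-table entry
 * (GEN_OPCODE_DUAL) for each gen_<name0>_<name1> dispatcher generated by
 * the earlier definition of the macro.
 */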
10481 #undef GEN_SPE
10482 #define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
10483 GEN_OPCODE_DUAL(name0##_##name1, 0x04, opc2, opc3, inval0, inval1, type, PPC_NONE)
10484 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
10485 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
10486 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
10487 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
10488 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
10489 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
10490 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
10491 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE),
10492 GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE),
10493 GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
10494 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
10495 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE),
10496 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE),
10497 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
10498 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
10499 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE),
10500 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
10501 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
10502 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE),
10503 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE),
10504 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
10505 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
10506 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
10507 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
10508 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE),
10509 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE),
10510 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE),
10511 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE),
10512 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE),
10514 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
10515 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
10516 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
10517 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
10518 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
10519 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
10520 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
10521 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
10522 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
10523 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
10524 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
10525 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
10526 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
10527 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
10529 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
10530 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
10531 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
10532 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
10533 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
10534 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE),
10535 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
10536 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
10537 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
10538 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
10539 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
10540 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
10541 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
10542 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
10544 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
10545 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
10546 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE),
10547 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE),
10548 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
10549 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
10550 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
10551 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE),
10552 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
10553 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
10554 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
10555 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
10556 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
10557 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
10558 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
10559 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
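/* Illustrative note on the rows above (the GEN_SPE expansion itself is
 * defined earlier in this file and not repeated here): each GEN_SPE row
 * appears to register a pair of SPE instructions under primary opcode 0x04
 * at the opc2/opc3 slot given, with the Rc bit of the opcode choosing the
 * first or the second name and, correspondingly, the first or the second
 * invalid-bits mask (see the inval1/inval2 selection in
 * gen_intermediate_code_internal() below). For example
 * GEN_SPE(evsrwu, evsrws, 0x10, 0x08, ...) covers evsrwu (Rc clear) and
 * evsrws (Rc set) in a single table slot.
 */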
10561 #undef GEN_SPEOP_LDST
10562 #define GEN_SPEOP_LDST(name, opc2, sh) \
10563 GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
10564 GEN_SPEOP_LDST(evldd, 0x00, 3),
10565 GEN_SPEOP_LDST(evldw, 0x01, 3),
10566 GEN_SPEOP_LDST(evldh, 0x02, 3),
10567 GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
10568 GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
10569 GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
10570 GEN_SPEOP_LDST(evlwhe, 0x08, 2),
10571 GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
10572 GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
10573 GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
10574 GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),
10576 GEN_SPEOP_LDST(evstdd, 0x10, 3),
10577 GEN_SPEOP_LDST(evstdw, 0x11, 3),
10578 GEN_SPEOP_LDST(evstdh, 0x12, 3),
10579 GEN_SPEOP_LDST(evstwhe, 0x18, 2),
10580 GEN_SPEOP_LDST(evstwho, 0x1A, 2),
10581 GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
10582 GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
10583 };
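/* Quick reading of the list above: GEN_SPEOP_LDST(name, opc2, sh) expands
 * (per the #define a few lines up) to GEN_HANDLER(name, 0x04, opc2, 0x0C,
 * 0x00000000, PPC_SPE), so every SPE vector load/store sits under primary
 * opcode 0x04 with opc3 fixed at 0x0C and only opc2 varying -- evldd is
 * looked up at [0x04][0x00][0x0C], evstdd at [0x04][0x10][0x0C]. The sh
 * argument (presumably log2 of the access size, judging by the values) is
 * not used by this registration-only variant of the macro.
 */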
10585 #include "helper_regs.h"
10586 #include "translate_init.c"
10588 /*****************************************************************************/
10589 /* Misc PowerPC helpers */
10590 void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
10591 int flags)
10592 {
10593 #define RGPL 4
10594 #define RFPL 4
10596 PowerPCCPU *cpu = POWERPC_CPU(cs);
10597 CPUPPCState *env = &cpu->env;
10598 int i;
10600 cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
10601 TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
10602 env->nip, env->lr, env->ctr, cpu_read_xer(env));
10603 cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
10604 TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
10605 env->hflags, env->mmu_idx);
10606 #if !defined(NO_TIMER_DUMP)
10607 cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
10608 #if !defined(CONFIG_USER_ONLY)
10609 " DECR %08" PRIu32
10610 #endif
10611 "\n",
10612 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
10613 #if !defined(CONFIG_USER_ONLY)
10614 , cpu_ppc_load_decr(env)
10615 #endif
10616 );
10617 #endif
10618 for (i = 0; i < 32; i++) {
10619 if ((i & (RGPL - 1)) == 0)
10620 cpu_fprintf(f, "GPR%02d", i);
10621 cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
10622 if ((i & (RGPL - 1)) == (RGPL - 1))
10623 cpu_fprintf(f, "\n");
10624 }
10625 cpu_fprintf(f, "CR ");
10626 for (i = 0; i < 8; i++)
10627 cpu_fprintf(f, "%01x", env->crf[i]);
10628 cpu_fprintf(f, " [");
10629 for (i = 0; i < 8; i++) {
10630 char a = '-';
10631 if (env->crf[i] & 0x08)
10632 a = 'L';
10633 else if (env->crf[i] & 0x04)
10634 a = 'G';
10635 else if (env->crf[i] & 0x02)
10636 a = 'E';
10637 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
10638 }
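/* Worked example for the CR summary just printed: each 4-bit CR field is
 * shown as one condition letter plus an optional 'O' -- 0x8 gives 'L' (LT),
 * 0x4 gives 'G' (GT), 0x2 gives 'E' (EQ), and bit 0x1 (SO) appends 'O'.
 * A field value of 0x5 therefore renders as "GO", and 0x0 as "- ".
 */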
10639 cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
10640 env->reserve_addr);
10641 for (i = 0; i < 32; i++) {
10642 if ((i & (RFPL - 1)) == 0)
10643 cpu_fprintf(f, "FPR%02d", i);
10644 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
10645 if ((i & (RFPL - 1)) == (RFPL - 1))
10646 cpu_fprintf(f, "\n");
10647 }
10648 cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
10649 #if !defined(CONFIG_USER_ONLY)
10650 cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
10651 " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
10652 env->spr[SPR_SRR0], env->spr[SPR_SRR1],
10653 env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);
10655 cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
10656 " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
10657 env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
10658 env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);
10660 cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
10661 " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
10662 env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
10663 env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
10665 if (env->excp_model == POWERPC_EXCP_BOOKE) {
10666 cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
10667 " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
10668 env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
10669 env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);
10671 cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
10672 " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
10673 env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
10674 env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);
10676 cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
10677 " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
10678 env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
10679 env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);
10681 cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
10682 " EPR " TARGET_FMT_lx "\n",
10683 env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
10684 env->spr[SPR_BOOKE_EPR]);
10686 /* FSL-specific */
10687 cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
10688 " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
10689 env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
10690 env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);
10692 /*
10693 * IVORs are left out as they are large and do not change often --
10694 * they can be read with "p $ivor0", "p $ivor1", etc.
10695 */
10696 }
10698 #if defined(TARGET_PPC64)
10699 if (env->flags & POWERPC_FLAG_CFAR) {
10700 cpu_fprintf(f, " CFAR " TARGET_FMT_lx"\n", env->cfar);
10701 }
10702 #endif
10704 switch (env->mmu_model) {
10705 case POWERPC_MMU_32B:
10706 case POWERPC_MMU_601:
10707 case POWERPC_MMU_SOFT_6xx:
10708 case POWERPC_MMU_SOFT_74xx:
10709 #if defined(TARGET_PPC64)
10710 case POWERPC_MMU_64B:
10711 case POWERPC_MMU_2_06:
10712 case POWERPC_MMU_2_06a:
10713 case POWERPC_MMU_2_06d:
10714 #endif
10715 cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " DAR " TARGET_FMT_lx
10716 " DSISR " TARGET_FMT_lx "\n", env->spr[SPR_SDR1],
10717 env->spr[SPR_DAR], env->spr[SPR_DSISR]);
10718 break;
10719 case POWERPC_MMU_BOOKE206:
10720 cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
10721 " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
10722 env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
10723 env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);
10725 cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
10726 " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
10727 env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
10728 env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);
10730 cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
10731 " TLB1CFG " TARGET_FMT_lx "\n",
10732 env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
10733 env->spr[SPR_BOOKE_TLB1CFG]);
10734 break;
10735 default:
10736 break;
10737 }
10738 #endif
10740 #undef RGPL
10741 #undef RFPL
10742 }
10744 void ppc_cpu_dump_statistics(CPUState *cs, FILE*f,
10745 fprintf_function cpu_fprintf, int flags)
10746 {
10747 #if defined(DO_PPC_STATISTICS)
10748 PowerPCCPU *cpu = POWERPC_CPU(cs);
10749 opc_handler_t **t1, **t2, **t3, *handler;
10750 int op1, op2, op3;
10752 t1 = cpu->env.opcodes;
10753 for (op1 = 0; op1 < 64; op1++) {
10754 handler = t1[op1];
10755 if (is_indirect_opcode(handler)) {
10756 t2 = ind_table(handler);
10757 for (op2 = 0; op2 < 32; op2++) {
10758 handler = t2[op2];
10759 if (is_indirect_opcode(handler)) {
10760 t3 = ind_table(handler);
10761 for (op3 = 0; op3 < 32; op3++) {
10762 handler = t3[op3];
10763 if (handler->count == 0)
10764 continue;
10765 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
10766 "%016" PRIx64 " %" PRId64 "\n",
10767 op1, op2, op3, op1, (op3 << 5) | op2,
10768 handler->oname,
10769 handler->count, handler->count);
10770 }
10771 } else {
10772 if (handler->count == 0)
10773 continue;
10774 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
10775 "%016" PRIx64 " %" PRId64 "\n",
10776 op1, op2, op1, op2, handler->oname,
10777 handler->count, handler->count);
10778 }
10779 }
10780 } else {
10781 if (handler->count == 0)
10782 continue;
10783 cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
10784 " %" PRId64 "\n",
10785 op1, op1, handler->oname,
10786 handler->count, handler->count);
10787 }
10788 }
10789 #endif
10790 }
10792 /*****************************************************************************/
10793 static inline void gen_intermediate_code_internal(PowerPCCPU *cpu,
10794 TranslationBlock *tb,
10795 bool search_pc)
10796 {
10797 CPUState *cs = CPU(cpu);
10798 CPUPPCState *env = &cpu->env;
10799 DisasContext ctx, *ctxp = &ctx;
10800 opc_handler_t **table, *handler;
10801 target_ulong pc_start;
10802 uint16_t *gen_opc_end;
10803 CPUBreakpoint *bp;
10804 int j, lj = -1;
10805 int num_insns;
10806 int max_insns;
10808 pc_start = tb->pc;
10809 gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
10810 ctx.nip = pc_start;
10811 ctx.tb = tb;
10812 ctx.exception = POWERPC_EXCP_NONE;
10813 ctx.spr_cb = env->spr_cb;
10814 ctx.mem_idx = env->mmu_idx;
10815 ctx.insns_flags = env->insns_flags;
10816 ctx.insns_flags2 = env->insns_flags2;
10817 ctx.access_type = -1;
10818 ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
10819 #if defined(TARGET_PPC64)
10820 ctx.sf_mode = msr_is_64bit(env, env->msr);
10821 ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
10822 #endif
10823 ctx.fpu_enabled = msr_fp;
10824 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
10825 ctx.spe_enabled = msr_spe;
10826 else
10827 ctx.spe_enabled = 0;
10828 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
10829 ctx.altivec_enabled = msr_vr;
10830 else
10831 ctx.altivec_enabled = 0;
10832 if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
10833 ctx.vsx_enabled = msr_vsx;
10834 } else {
10835 ctx.vsx_enabled = 0;
10836 }
10837 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
10838 ctx.singlestep_enabled = CPU_SINGLE_STEP;
10839 else
10840 ctx.singlestep_enabled = 0;
10841 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
10842 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
10843 if (unlikely(cs->singlestep_enabled)) {
10844 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
10845 }
10846 #if defined (DO_SINGLE_STEP) && 0
10847 /* Single step trace mode */
10848 msr_se = 1;
10849 #endif
10850 num_insns = 0;
10851 max_insns = tb->cflags & CF_COUNT_MASK;
10852 if (max_insns == 0)
10853 max_insns = CF_COUNT_MASK;
10855 gen_tb_start();
10856 /* Set env in case of segfault during code fetch */
10857 while (ctx.exception == POWERPC_EXCP_NONE
10858 && tcg_ctx.gen_opc_ptr < gen_opc_end) {
10859 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
10860 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
10861 if (bp->pc == ctx.nip) {
10862 gen_debug_exception(ctxp);
10863 break;
10864 }
10865 }
10866 }
10867 if (unlikely(search_pc)) {
10868 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
10869 if (lj < j) {
10870 lj++;
10871 while (lj < j)
10872 tcg_ctx.gen_opc_instr_start[lj++] = 0;
10873 }
10874 tcg_ctx.gen_opc_pc[lj] = ctx.nip;
10875 tcg_ctx.gen_opc_instr_start[lj] = 1;
10876 tcg_ctx.gen_opc_icount[lj] = num_insns;
10877 }
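/* The three gen_opc_* arrays filled in above are the search_pc side table:
 * for each TCG op index they record the guest nip, an instruction-start
 * flag and the icount, which is what lets restore_state_to_opc() at the
 * bottom of this file recover env->nip for a given op index when the core
 * code needs to restore CPU state in the middle of a TB.
 */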
10878 LOG_DISAS("----------------\n");
10879 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
10880 ctx.nip, ctx.mem_idx, (int)msr_ir);
10881 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
10882 gen_io_start();
10883 if (unlikely(ctx.le_mode)) {
10884 ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
10885 } else {
10886 ctx.opcode = cpu_ldl_code(env, ctx.nip);
10887 }
10888 LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
10889 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
10890 opc3(ctx.opcode), ctx.le_mode ? "little" : "big");
10891 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
10892 tcg_gen_debug_insn_start(ctx.nip);
10893 }
10894 ctx.nip += 4;
10895 table = env->opcodes;
10896 num_insns++;
10897 handler = table[opc1(ctx.opcode)];
10898 if (is_indirect_opcode(handler)) {
10899 table = ind_table(handler);
10900 handler = table[opc2(ctx.opcode)];
10901 if (is_indirect_opcode(handler)) {
10902 table = ind_table(handler);
10903 handler = table[opc3(ctx.opcode)];
10904 }
10905 }
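/* Worked example of the three-level lookup above, assuming the opc1/opc2/
 * opc3 helpers defined earlier in this file extract bits 26-31, 1-5 and
 * 6-10 of the instruction word: `add r3,r4,r5' encodes as 0x7C642A14,
 * which splits into opc1 = 0x1F, opc2 = 0x0A, opc3 = 0x08, so decode walks
 * opcodes[0x1F], then the indirect table at slot 0x0A, then slot 0x08.
 */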
10906 /* Is opcode *REALLY* valid ? */
10907 if (unlikely(handler->handler == &gen_invalid)) {
10908 if (qemu_log_enabled()) {
10909 qemu_log("invalid/unsupported opcode: "
10910 "%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
10911 opc1(ctx.opcode), opc2(ctx.opcode),
10912 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
10913 }
10914 } else {
10915 uint32_t inval;
10917 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE) && Rc(ctx.opcode))) {
10918 inval = handler->inval2;
10919 } else {
10920 inval = handler->inval1;
10921 }
10923 if (unlikely((ctx.opcode & inval) != 0)) {
10924 if (qemu_log_enabled()) {
10925 qemu_log("invalid bits: %08x for opcode: "
10926 "%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
10927 ctx.opcode & inval, opc1(ctx.opcode),
10928 opc2(ctx.opcode), opc3(ctx.opcode),
10929 ctx.opcode, ctx.nip - 4);
10930 }
10931 gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
10932 break;
10933 }
10934 }
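/* Illustrative case for the inval check above: evcmpeq is registered with
 * inval1 = 0x00600000. Its destination is a 3-bit CR field, so the two low
 * bits of the 5-bit rD slot are reserved and must be zero; an encoding with
 * either bit set is rejected here with POWERPC_EXCP_INVAL_INVAL instead of
 * being silently accepted.
 */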
10935 (*(handler->handler))(&ctx);
10936 #if defined(DO_PPC_STATISTICS)
10937 handler->count++;
10938 #endif
10939 /* Check trace mode exceptions */
10940 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
10941 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
10942 ctx.exception != POWERPC_SYSCALL &&
10943 ctx.exception != POWERPC_EXCP_TRAP &&
10944 ctx.exception != POWERPC_EXCP_BRANCH)) {
10945 gen_exception(ctxp, POWERPC_EXCP_TRACE);
10946 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
10947 (cs->singlestep_enabled) ||
10948 singlestep ||
10949 num_insns >= max_insns)) {
10950 /* if we reach a page boundary or are single stepping, stop
10951 * generation
10952 */
10953 break;
10954 }
10955 }
10956 if (tb->cflags & CF_LAST_IO)
10957 gen_io_end();
10958 if (ctx.exception == POWERPC_EXCP_NONE) {
10959 gen_goto_tb(&ctx, 0, ctx.nip);
10960 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
10961 if (unlikely(cs->singlestep_enabled)) {
10962 gen_debug_exception(ctxp);
10963 }
10964 /* Generate the return instruction */
10965 tcg_gen_exit_tb(0);
10966 }
10967 gen_tb_end(tb, num_insns);
10968 *tcg_ctx.gen_opc_ptr = INDEX_op_end;
10969 if (unlikely(search_pc)) {
10970 j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
10971 lj++;
10972 while (lj <= j)
10973 tcg_ctx.gen_opc_instr_start[lj++] = 0;
10974 } else {
10975 tb->size = ctx.nip - pc_start;
10976 tb->icount = num_insns;
10977 }
10978 #if defined(DEBUG_DISAS)
10979 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
10980 int flags;
10981 flags = env->bfd_mach;
10982 flags |= ctx.le_mode << 16;
10983 qemu_log("IN: %s\n", lookup_symbol(pc_start));
10984 log_target_disas(env, pc_start, ctx.nip - pc_start, flags);
10985 qemu_log("\n");
10986 }
10987 #endif
10988 }
10990 void gen_intermediate_code (CPUPPCState *env, struct TranslationBlock *tb)
10991 {
10992 gen_intermediate_code_internal(ppc_env_get_cpu(env), tb, false);
10993 }
10995 void gen_intermediate_code_pc (CPUPPCState *env, struct TranslationBlock *tb)
10996 {
10997 gen_intermediate_code_internal(ppc_env_get_cpu(env), tb, true);
10998 }
11000 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, int pc_pos)
11001 {
11002 env->nip = tcg_ctx.gen_opc_pc[pc_pos];