/*
 *  PowerPC emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *  Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "cpu.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "qemu/host-utils.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Include definitions for instructions classes and implementations flags */
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS

#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers */

/* global register indexes */
static TCGv_env cpu_env;
static char cpu_reg_names[10*3 + 22*4 /* GPR */
    + 10*4 + 22*5 /* SPE GPRh */
    + 10*4 + 22*5 /* FPR */
    + 2*(10*6 + 22*7) /* AVRh, AVRl */
    + 10*5 + 22*6 /* VSR */
    + 8*5 /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i64 cpu_fpr[32];
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
static TCGv_i64 cpu_vsr[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca;
static TCGv cpu_reserve;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;
#include "exec/gen-icount.h"

void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;
    static int done_init = 0;

    if (done_init) {
        return;
    }

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }
    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;

        snprintf(p, cpu_reg_names_size, "fp%d", i);
        cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUPPCState, fpr[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;

        snprintf(p, cpu_reg_names_size, "avr%dH", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#else
        cpu_avrh[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;

        snprintf(p, cpu_reg_names_size, "avr%dL", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#else
        cpu_avrl[i] = tcg_global_mem_new_i64(cpu_env,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;
        snprintf(p, cpu_reg_names_size, "vsr%d", i);
        cpu_vsr[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUPPCState, vsr[i]), p);
        p += (i < 10) ? 5 : 6;
        cpu_reg_names_size -= (i < 10) ? 5 : 6;
    }
    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type), "access_type");

    done_init = 1;
}
/* internal defines */
struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong nip;
    uint32_t opcode;
    uint32_t exception;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;
    int access_type;
    /* Translation flags */
    TCGMemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};
/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long. */
#ifdef TARGET_PPC64
# define NARROW_MODE(C)  (!(C)->sf_mode)
#else
# define NARROW_MODE(C)  0
#endif
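
/* For example, on a 64-bit CPU running a 32-bit OS (MSR[SF] clear) the
 * NARROW_MODE() checks below make compares, carries and overflow use only
 * the low 32 bits of the 64-bit registers. */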
struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
    const char *oname;
#endif
#if defined(DO_PPC_STATISTICS)
    uint64_t count;
#endif
};
static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus(cpu_env);
}

static inline void gen_compute_fprf(TCGv_i64 arg)
{
    gen_helper_compute_fprf(cpu_env, arg);
    gen_helper_float_check_status(cpu_env);
}

static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}
static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

void gen_update_current_nip(void *opaque)
{
    DisasContext *ctx = opaque;

    tcg_gen_movi_tl(cpu_nip, ctx->nip);
}
static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->exception = (excp);
}
static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}
static void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
        (ctx->exception != POWERPC_EXCP_SYNC)) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
}
static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/* Stop translation */
static inline void gen_stop_exception(DisasContext *ctx)
{
    gen_update_nip(ctx, ctx->nip);
    ctx->exception = POWERPC_EXCP_STOP;
}
#ifndef CONFIG_USER_ONLY
/* No need to update nip here, as execution flow will change */
static inline void gen_sync_exception(DisasContext *ctx)
{
    ctx->exception = POWERPC_EXCP_SYNC;
}
#endif

#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
#define GEN_PRIV                                                \
    do {                                                        \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
#define CHK_HV                                                  \
    do {                                                        \
        if (unlikely(ctx->pr || !ctx->hv)) {                    \
            GEN_PRIV;                                           \
        }                                                       \
    } while (0)
#define CHK_SV                                                  \
    do {                                                        \
        if (unlikely(ctx->pr)) {                                \
            GEN_PRIV;                                           \
        }                                                       \
    } while (0)
#define CHK_HVRM                                                \
    do {                                                        \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {         \
            GEN_PRIV;                                           \
        }                                                       \
    } while (0)
#endif
/*****************************************************************************/
/*** Instruction decoding ***/

#define EXTRACT_HELPER(name, shift, nb)                                       \
static inline uint32_t name(uint32_t opcode)                                  \
{                                                                             \
    return (opcode >> (shift)) & ((1 << (nb)) - 1);                           \
}

#define EXTRACT_SHELPER(name, shift, nb)                                      \
static inline int32_t name(uint32_t opcode)                                   \
{                                                                             \
    return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1));                \
}

#define EXTRACT_HELPER_SPLIT(name, shift1, nb1, shift2, nb2)                  \
static inline uint32_t name(uint32_t opcode)                                  \
{                                                                             \
    return (((opcode >> (shift1)) & ((1 << (nb1)) - 1)) << nb2) |             \
            ((opcode >> (shift2)) & ((1 << (nb2)) - 1));                      \
}

#define EXTRACT_HELPER_DXFORM(name,                                           \
                              d0_bits, shift_op_d0, shift_d0,                 \
                              d1_bits, shift_op_d1, shift_d1,                 \
                              d2_bits, shift_op_d2, shift_d2)                 \
static inline int16_t name(uint32_t opcode)                                   \
{                                                                             \
    return                                                                    \
        (((opcode >> (shift_op_d0)) & ((1 << (d0_bits)) - 1)) << (shift_d0)) | \
        (((opcode >> (shift_op_d1)) & ((1 << (d1_bits)) - 1)) << (shift_d1)) | \
        (((opcode >> (shift_op_d2)) & ((1 << (d2_bits)) - 1)) << (shift_d2));  \
}

EXTRACT_HELPER(opc1, 26, 6);
EXTRACT_HELPER(opc2, 1, 5);
EXTRACT_HELPER(opc3, 6, 5);
EXTRACT_HELPER(opc4, 16, 5);
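
/* As an illustration, EXTRACT_HELPER(rA, 16, 5) below expands to
 *     static inline uint32_t rA(uint32_t opcode)
 *     { return (opcode >> 16) & 0x1F; }
 * i.e. it pulls the 5-bit rA register field out of a 32-bit opcode. */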
/* Update Cr0 flags */
EXTRACT_HELPER(Rc, 0, 1);
/* Update Cr6 flags (Altivec) */
EXTRACT_HELPER(Rc21, 10, 1);
EXTRACT_HELPER(rD, 21, 5);
EXTRACT_HELPER(rS, 21, 5);
EXTRACT_HELPER(rA, 16, 5);
EXTRACT_HELPER(rB, 11, 5);
EXTRACT_HELPER(rC, 6, 5);
EXTRACT_HELPER(crfD, 23, 3);
EXTRACT_HELPER(crfS, 18, 3);
EXTRACT_HELPER(crbD, 21, 5);
EXTRACT_HELPER(crbA, 16, 5);
EXTRACT_HELPER(crbB, 11, 5);
EXTRACT_HELPER(_SPR, 11, 10);
static inline uint32_t SPR(uint32_t opcode)
{
    uint32_t sprn = _SPR(opcode);

    return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
}
/*** Get constants ***/
/* 16 bits signed immediate value */
EXTRACT_SHELPER(SIMM, 0, 16);
/* 16 bits unsigned immediate value */
EXTRACT_HELPER(UIMM, 0, 16);
/* 5 bits signed immediate value */
EXTRACT_HELPER(SIMM5, 16, 5);
/* 5 bits unsigned immediate value */
EXTRACT_HELPER(UIMM5, 16, 5);
EXTRACT_HELPER(NB, 11, 5);
EXTRACT_HELPER(SH, 11, 5);
/* Vector shift count */
EXTRACT_HELPER(VSH, 6, 4);
EXTRACT_HELPER(MB, 6, 5);
EXTRACT_HELPER(ME, 1, 5);
EXTRACT_HELPER(TO, 21, 5);

EXTRACT_HELPER(CRM, 12, 8);

#ifndef CONFIG_USER_ONLY
EXTRACT_HELPER(SR, 16, 4);
#endif

EXTRACT_HELPER(FPBF, 23, 3);
EXTRACT_HELPER(FPIMM, 12, 4);
EXTRACT_HELPER(FPL, 25, 1);
EXTRACT_HELPER(FPFLM, 17, 8);
EXTRACT_HELPER(FPW, 16, 1);

EXTRACT_HELPER_DXFORM(DX, 10, 6, 6, 5, 16, 1, 1, 0, 0)
/*** Jump target decoding ***/
/* Immediate address */
static inline target_ulong LI(uint32_t opcode)
{
    return (opcode >> 0) & 0x03FFFFFC;
}

static inline uint32_t BD(uint32_t opcode)
{
    return (opcode >> 0) & 0xFFFC;
}

EXTRACT_HELPER(BO, 21, 5);
EXTRACT_HELPER(BI, 16, 5);
/* Absolute/relative address */
EXTRACT_HELPER(AA, 1, 1);
EXTRACT_HELPER(LK, 0, 1);

EXTRACT_HELPER(DCM, 10, 6)

EXTRACT_HELPER(RMC, 9, 2)
/* Create a mask between <start> and <end> bits */
static inline target_ulong MASK(uint32_t start, uint32_t end)
{
    target_ulong ret;

#if defined(TARGET_PPC64)
    if (likely(start == 0)) {
        ret = UINT64_MAX << (63 - end);
    } else if (likely(end == 63)) {
        ret = UINT64_MAX >> start;
    }
#else
    if (likely(start == 0)) {
        ret = UINT32_MAX << (31 - end);
    } else if (likely(end == 31)) {
        ret = UINT32_MAX >> start;
    }
#endif
    else {
        ret = (((target_ulong)(-1ULL)) >> (start)) ^
            (((target_ulong)(-1ULL) >> (end)) >> 1);
        if (unlikely(start > end))
            return ~ret;
    }

    return ret;
}
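
/* Note that MASK() uses PowerPC bit numbering (bit 0 is the most significant
 * bit): e.g. on a 64-bit target MASK(0, 7) is 0xFF00000000000000 and
 * MASK(32, 63) is 0x00000000FFFFFFFF; when start > end the selection wraps
 * around, which is why the complement is returned in that case. */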
EXTRACT_HELPER_SPLIT(xT, 0, 1, 21, 5);
EXTRACT_HELPER_SPLIT(xS, 0, 1, 21, 5);
EXTRACT_HELPER_SPLIT(xA, 2, 1, 16, 5);
EXTRACT_HELPER_SPLIT(xB, 1, 1, 11, 5);
EXTRACT_HELPER_SPLIT(xC, 3, 1, 6, 5);
EXTRACT_HELPER(DM, 8, 2);
EXTRACT_HELPER(UIM, 16, 2);
EXTRACT_HELPER(SHW, 8, 2);
EXTRACT_HELPER(SP, 19, 2);
/*****************************************************************************/
/* PowerPC instructions table */

#if defined(DO_PPC_STATISTICS)
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
        .handler = &gen_##name,                                               \
        .oname = stringify(name),                                             \
    .oname = stringify(name),                                                 \
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
        .handler = &gen_##name,                                               \
        .oname = stringify(name),                                             \
    .oname = stringify(name),                                                 \
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
        .handler = &gen_##name,                                               \
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
        .handler = &gen_##name,                                               \
    .oname = stringify(name),                                                 \
    .oname = stringify(name),                                                 \
#else
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
        .handler = &gen_##name,                                               \
    .oname = stringify(name),                                                 \
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
        .handler = &gen_##name,                                               \
    .oname = stringify(name),                                                 \
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
        .handler = &gen_##name,                                               \
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
        .handler = &gen_##name,                                               \
    .oname = stringify(name),                                                 \
#endif
/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};
/*** Integer comparison ***/

static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);

    tcg_gen_setcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_LT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_GT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_EQ);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
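
/* The 4-bit CR field is assembled incrementally: it starts from a copy of
 * XER[SO], then the LT, GT and EQ bits are ORed in from the three setcond
 * results above, so e.g. a "cmpw cr7,rA,rB" ends up entirely in cpu_crf[7]. */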
static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}
static void gen_cmp(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    }
}

static void gen_cmpi(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    }
}

static void gen_cmpl(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    }
}

static void gen_cmpli(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    }
}
/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}
#if defined(TARGET_PPC64)
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif

/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}

/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}
927 static inline void gen_op_arith_compute_ov(DisasContext
*ctx
, TCGv arg0
,
928 TCGv arg1
, TCGv arg2
, int sub
)
930 TCGv t0
= tcg_temp_new();
932 tcg_gen_xor_tl(cpu_ov
, arg0
, arg2
);
933 tcg_gen_xor_tl(t0
, arg1
, arg2
);
935 tcg_gen_and_tl(cpu_ov
, cpu_ov
, t0
);
937 tcg_gen_andc_tl(cpu_ov
, cpu_ov
, t0
);
940 if (NARROW_MODE(ctx
)) {
941 tcg_gen_ext32s_tl(cpu_ov
, cpu_ov
);
943 tcg_gen_shri_tl(cpu_ov
, cpu_ov
, TARGET_LONG_BITS
- 1);
944 tcg_gen_or_tl(cpu_so
, cpu_so
, cpu_ov
);
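
/* This is the usual signed-overflow rule: for an addition OV is set when the
 * operands have the same sign but the result's sign differs (the xor/andc
 * sequence leaves that condition in the sign bit, which is then shifted down
 * into cpu_ov); for a subtraction the operand signs must differ instead,
 * hence the plain and_tl in the sub case. */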
/* Common add function */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32.  */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, cpu_ca);
            }
            tcg_gen_xor_tl(cpu_ca, t0, t1);        /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_shri_tl(cpu_ca, cpu_ca, 32);   /* extract bit 32 */
            tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero);
                tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero);
            }
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, cpu_ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
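
/* The 32-bit carry in the NARROW_MODE path relies on the identity that
 * (a + b) ^ a ^ b has bit n set exactly when there is a carry *into* bit n,
 * so bit 32 of that value is the carry out of bit 31; e.g. 0xFFFFFFFF + 1
 * sets bit 32 and therefore CA = 1. */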
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
    tcg_temp_free(t0);                                                        \
}
/* add add. addo addo. */
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
/* addc addc. addco addco. */
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
/* adde adde. addeo addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
/* addme addme. addmeo addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
/* addze addze. addzeo addzeo. */
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
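
/* Each line above expands into a small gen_* handler that simply forwards to
 * gen_op_arith_add: e.g. "adde" passes add_ca=1, compute_ca=1, compute_ov=0,
 * "addeo" additionally sets compute_ov, and the *_CONST variants substitute
 * the -1/0 constant for the rB operand. */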
1038 static void gen_addi(DisasContext
*ctx
)
1040 target_long simm
= SIMM(ctx
->opcode
);
1042 if (rA(ctx
->opcode
) == 0) {
1044 tcg_gen_movi_tl(cpu_gpr
[rD(ctx
->opcode
)], simm
);
1046 tcg_gen_addi_tl(cpu_gpr
[rD(ctx
->opcode
)],
1047 cpu_gpr
[rA(ctx
->opcode
)], simm
);
1051 static inline void gen_op_addic(DisasContext
*ctx
, bool compute_rc0
)
1053 TCGv c
= tcg_const_tl(SIMM(ctx
->opcode
));
1054 gen_op_arith_add(ctx
, cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)],
1055 c
, 0, 1, 0, compute_rc0
);
1059 static void gen_addic(DisasContext
*ctx
)
1061 gen_op_addic(ctx
, 0);
1064 static void gen_addic_(DisasContext
*ctx
)
1066 gen_op_addic(ctx
, 1);
1070 static void gen_addis(DisasContext
*ctx
)
1072 target_long simm
= SIMM(ctx
->opcode
);
1074 if (rA(ctx
->opcode
) == 0) {
1076 tcg_gen_movi_tl(cpu_gpr
[rD(ctx
->opcode
)], simm
<< 16);
1078 tcg_gen_addi_tl(cpu_gpr
[rD(ctx
->opcode
)],
1079 cpu_gpr
[rA(ctx
->opcode
)], simm
<< 16);
1084 static void gen_addpcis(DisasContext
*ctx
)
1086 target_long d
= DX(ctx
->opcode
);
1088 tcg_gen_movi_tl(cpu_gpr
[rD(ctx
->opcode
)], ctx
->nip
+ (d
<< 16));
1091 static inline void gen_op_arith_divw(DisasContext
*ctx
, TCGv ret
, TCGv arg1
,
1092 TCGv arg2
, int sign
, int compute_ov
)
1094 TCGLabel
*l1
= gen_new_label();
1095 TCGLabel
*l2
= gen_new_label();
1096 TCGv_i32 t0
= tcg_temp_local_new_i32();
1097 TCGv_i32 t1
= tcg_temp_local_new_i32();
1099 tcg_gen_trunc_tl_i32(t0
, arg1
);
1100 tcg_gen_trunc_tl_i32(t1
, arg2
);
1101 tcg_gen_brcondi_i32(TCG_COND_EQ
, t1
, 0, l1
);
1103 TCGLabel
*l3
= gen_new_label();
1104 tcg_gen_brcondi_i32(TCG_COND_NE
, t1
, -1, l3
);
1105 tcg_gen_brcondi_i32(TCG_COND_EQ
, t0
, INT32_MIN
, l1
);
1107 tcg_gen_div_i32(t0
, t0
, t1
);
1109 tcg_gen_divu_i32(t0
, t0
, t1
);
1112 tcg_gen_movi_tl(cpu_ov
, 0);
1117 tcg_gen_sari_i32(t0
, t0
, 31);
1119 tcg_gen_movi_i32(t0
, 0);
1122 tcg_gen_movi_tl(cpu_ov
, 1);
1123 tcg_gen_movi_tl(cpu_so
, 1);
1126 tcg_gen_extu_i32_tl(ret
, t0
);
1127 tcg_temp_free_i32(t0
);
1128 tcg_temp_free_i32(t1
);
1129 if (unlikely(Rc(ctx
->opcode
) != 0))
1130 gen_set_Rc0(ctx
, ret
);
1133 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
1134 static void glue(gen_, name)(DisasContext *ctx) \
1136 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
1137 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1138 sign, compute_ov); \
1140 /* divwu divwu. divwuo divwuo. */
1141 GEN_INT_ARITH_DIVW(divwu
, 0x0E, 0, 0);
1142 GEN_INT_ARITH_DIVW(divwuo
, 0x1E, 0, 1);
1143 /* divw divw. divwo divwo. */
1144 GEN_INT_ARITH_DIVW(divw
, 0x0F, 1, 0);
1145 GEN_INT_ARITH_DIVW(divwo
, 0x1F, 1, 1);
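/* gen_op_arith_divw above special-cases division by zero and, for the signed
 * forms, INT32_MIN / -1: the quotient is forced to 0 (or to all ones when the
 * dividend of a signed divide is negative) and, for the "o" forms, OV and SO
 * are set instead of performing the host division. */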
1147 /* div[wd]eu[o][.] */
1148 #define GEN_DIVE(name, hlpr, compute_ov) \
1149 static void gen_##name(DisasContext *ctx) \
1151 TCGv_i32 t0 = tcg_const_i32(compute_ov); \
1152 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
1153 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1154 tcg_temp_free_i32(t0); \
1155 if (unlikely(Rc(ctx->opcode) != 0)) { \
1156 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1160 GEN_DIVE(divweu
, divweu
, 0);
1161 GEN_DIVE(divweuo
, divweu
, 1);
1162 GEN_DIVE(divwe
, divwe
, 0);
1163 GEN_DIVE(divweo
, divwe
, 1);
1165 #if defined(TARGET_PPC64)
1166 static inline void gen_op_arith_divd(DisasContext
*ctx
, TCGv ret
, TCGv arg1
,
1167 TCGv arg2
, int sign
, int compute_ov
)
1169 TCGLabel
*l1
= gen_new_label();
1170 TCGLabel
*l2
= gen_new_label();
1172 tcg_gen_brcondi_i64(TCG_COND_EQ
, arg2
, 0, l1
);
1174 TCGLabel
*l3
= gen_new_label();
1175 tcg_gen_brcondi_i64(TCG_COND_NE
, arg2
, -1, l3
);
1176 tcg_gen_brcondi_i64(TCG_COND_EQ
, arg1
, INT64_MIN
, l1
);
1178 tcg_gen_div_i64(ret
, arg1
, arg2
);
1180 tcg_gen_divu_i64(ret
, arg1
, arg2
);
1183 tcg_gen_movi_tl(cpu_ov
, 0);
1188 tcg_gen_sari_i64(ret
, arg1
, 63);
1190 tcg_gen_movi_i64(ret
, 0);
1193 tcg_gen_movi_tl(cpu_ov
, 1);
1194 tcg_gen_movi_tl(cpu_so
, 1);
1197 if (unlikely(Rc(ctx
->opcode
) != 0))
1198 gen_set_Rc0(ctx
, ret
);
1200 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1201 static void glue(gen_, name)(DisasContext *ctx) \
1203 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1204 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1205 sign, compute_ov); \
/* divdu divdu. divduo divduo. */
1208 GEN_INT_ARITH_DIVD(divdu
, 0x0E, 0, 0);
1209 GEN_INT_ARITH_DIVD(divduo
, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
1211 GEN_INT_ARITH_DIVD(divd
, 0x0F, 1, 0);
1212 GEN_INT_ARITH_DIVD(divdo
, 0x1F, 1, 1);
1214 GEN_DIVE(divdeu
, divdeu
, 0);
1215 GEN_DIVE(divdeuo
, divdeu
, 1);
1216 GEN_DIVE(divde
, divde
, 0);
1217 GEN_DIVE(divdeo
, divde
, 1);
1220 static inline void gen_op_arith_modw(DisasContext
*ctx
, TCGv ret
, TCGv arg1
,
1221 TCGv arg2
, int sign
)
1223 TCGv_i32 t0
= tcg_temp_new_i32();
1224 TCGv_i32 t1
= tcg_temp_new_i32();
1226 tcg_gen_trunc_tl_i32(t0
, arg1
);
1227 tcg_gen_trunc_tl_i32(t1
, arg2
);
1229 TCGv_i32 t2
= tcg_temp_new_i32();
1230 TCGv_i32 t3
= tcg_temp_new_i32();
1231 tcg_gen_setcondi_i32(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
1232 tcg_gen_setcondi_i32(TCG_COND_EQ
, t3
, t1
, -1);
1233 tcg_gen_and_i32(t2
, t2
, t3
);
1234 tcg_gen_setcondi_i32(TCG_COND_EQ
, t3
, t1
, 0);
1235 tcg_gen_or_i32(t2
, t2
, t3
);
1236 tcg_gen_movi_i32(t3
, 0);
1237 tcg_gen_movcond_i32(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
1238 tcg_gen_rem_i32(t3
, t0
, t1
);
1239 tcg_gen_ext_i32_tl(ret
, t3
);
1240 tcg_temp_free_i32(t2
);
1241 tcg_temp_free_i32(t3
);
1243 TCGv_i32 t2
= tcg_const_i32(1);
1244 TCGv_i32 t3
= tcg_const_i32(0);
1245 tcg_gen_movcond_i32(TCG_COND_EQ
, t1
, t1
, t3
, t2
, t1
);
1246 tcg_gen_remu_i32(t3
, t0
, t1
);
1247 tcg_gen_extu_i32_tl(ret
, t3
);
1248 tcg_temp_free_i32(t2
);
1249 tcg_temp_free_i32(t3
);
1251 tcg_temp_free_i32(t0
);
1252 tcg_temp_free_i32(t1
);
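/* The movcond above substitutes a harmless divisor in the problem cases
 * (division by zero and, for modsw, INT_MIN % -1), so the remainder simply
 * comes out as 0 and the host rem/remu operation can never trap. */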
1255 #define GEN_INT_ARITH_MODW(name, opc3, sign) \
1256 static void glue(gen_, name)(DisasContext *ctx) \
1258 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
1259 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1263 GEN_INT_ARITH_MODW(moduw
, 0x08, 0);
1264 GEN_INT_ARITH_MODW(modsw
, 0x18, 1);
1266 #if defined(TARGET_PPC64)
1267 static inline void gen_op_arith_modd(DisasContext
*ctx
, TCGv ret
, TCGv arg1
,
1268 TCGv arg2
, int sign
)
1270 TCGv_i64 t0
= tcg_temp_new_i64();
1271 TCGv_i64 t1
= tcg_temp_new_i64();
1273 tcg_gen_mov_i64(t0
, arg1
);
1274 tcg_gen_mov_i64(t1
, arg2
);
1276 TCGv_i64 t2
= tcg_temp_new_i64();
1277 TCGv_i64 t3
= tcg_temp_new_i64();
1278 tcg_gen_setcondi_i64(TCG_COND_EQ
, t2
, t0
, INT64_MIN
);
1279 tcg_gen_setcondi_i64(TCG_COND_EQ
, t3
, t1
, -1);
1280 tcg_gen_and_i64(t2
, t2
, t3
);
1281 tcg_gen_setcondi_i64(TCG_COND_EQ
, t3
, t1
, 0);
1282 tcg_gen_or_i64(t2
, t2
, t3
);
1283 tcg_gen_movi_i64(t3
, 0);
1284 tcg_gen_movcond_i64(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
1285 tcg_gen_rem_i64(ret
, t0
, t1
);
1286 tcg_temp_free_i64(t2
);
1287 tcg_temp_free_i64(t3
);
1289 TCGv_i64 t2
= tcg_const_i64(1);
1290 TCGv_i64 t3
= tcg_const_i64(0);
1291 tcg_gen_movcond_i64(TCG_COND_EQ
, t1
, t1
, t3
, t2
, t1
);
1292 tcg_gen_remu_i64(ret
, t0
, t1
);
1293 tcg_temp_free_i64(t2
);
1294 tcg_temp_free_i64(t3
);
1296 tcg_temp_free_i64(t0
);
1297 tcg_temp_free_i64(t1
);
1300 #define GEN_INT_ARITH_MODD(name, opc3, sign) \
1301 static void glue(gen_, name)(DisasContext *ctx) \
1303 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
1304 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1308 GEN_INT_ARITH_MODD(modud
, 0x08, 0);
1309 GEN_INT_ARITH_MODD(modsd
, 0x18, 1);
1313 static void gen_mulhw(DisasContext
*ctx
)
1315 TCGv_i32 t0
= tcg_temp_new_i32();
1316 TCGv_i32 t1
= tcg_temp_new_i32();
1318 tcg_gen_trunc_tl_i32(t0
, cpu_gpr
[rA(ctx
->opcode
)]);
1319 tcg_gen_trunc_tl_i32(t1
, cpu_gpr
[rB(ctx
->opcode
)]);
1320 tcg_gen_muls2_i32(t0
, t1
, t0
, t1
);
1321 tcg_gen_extu_i32_tl(cpu_gpr
[rD(ctx
->opcode
)], t1
);
1322 tcg_temp_free_i32(t0
);
1323 tcg_temp_free_i32(t1
);
1324 if (unlikely(Rc(ctx
->opcode
) != 0))
1325 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
1328 /* mulhwu mulhwu. */
1329 static void gen_mulhwu(DisasContext
*ctx
)
1331 TCGv_i32 t0
= tcg_temp_new_i32();
1332 TCGv_i32 t1
= tcg_temp_new_i32();
1334 tcg_gen_trunc_tl_i32(t0
, cpu_gpr
[rA(ctx
->opcode
)]);
1335 tcg_gen_trunc_tl_i32(t1
, cpu_gpr
[rB(ctx
->opcode
)]);
1336 tcg_gen_mulu2_i32(t0
, t1
, t0
, t1
);
1337 tcg_gen_extu_i32_tl(cpu_gpr
[rD(ctx
->opcode
)], t1
);
1338 tcg_temp_free_i32(t0
);
1339 tcg_temp_free_i32(t1
);
1340 if (unlikely(Rc(ctx
->opcode
) != 0))
1341 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
1345 static void gen_mullw(DisasContext
*ctx
)
1347 #if defined(TARGET_PPC64)
1349 t0
= tcg_temp_new_i64();
1350 t1
= tcg_temp_new_i64();
1351 tcg_gen_ext32s_tl(t0
, cpu_gpr
[rA(ctx
->opcode
)]);
1352 tcg_gen_ext32s_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)]);
1353 tcg_gen_mul_i64(cpu_gpr
[rD(ctx
->opcode
)], t0
, t1
);
1357 tcg_gen_mul_i32(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)],
1358 cpu_gpr
[rB(ctx
->opcode
)]);
1360 if (unlikely(Rc(ctx
->opcode
) != 0))
1361 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
1364 /* mullwo mullwo. */
1365 static void gen_mullwo(DisasContext
*ctx
)
1367 TCGv_i32 t0
= tcg_temp_new_i32();
1368 TCGv_i32 t1
= tcg_temp_new_i32();
1370 tcg_gen_trunc_tl_i32(t0
, cpu_gpr
[rA(ctx
->opcode
)]);
1371 tcg_gen_trunc_tl_i32(t1
, cpu_gpr
[rB(ctx
->opcode
)]);
1372 tcg_gen_muls2_i32(t0
, t1
, t0
, t1
);
1373 #if defined(TARGET_PPC64)
1374 tcg_gen_concat_i32_i64(cpu_gpr
[rD(ctx
->opcode
)], t0
, t1
);
1376 tcg_gen_mov_i32(cpu_gpr
[rD(ctx
->opcode
)], t0
);
1379 tcg_gen_sari_i32(t0
, t0
, 31);
1380 tcg_gen_setcond_i32(TCG_COND_NE
, t0
, t0
, t1
);
1381 tcg_gen_extu_i32_tl(cpu_ov
, t0
);
1382 tcg_gen_or_tl(cpu_so
, cpu_so
, cpu_ov
);
1384 tcg_temp_free_i32(t0
);
1385 tcg_temp_free_i32(t1
);
1386 if (unlikely(Rc(ctx
->opcode
) != 0))
1387 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
1391 static void gen_mulli(DisasContext
*ctx
)
1393 tcg_gen_muli_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)],
1397 #if defined(TARGET_PPC64)
1399 static void gen_mulhd(DisasContext
*ctx
)
1401 TCGv lo
= tcg_temp_new();
1402 tcg_gen_muls2_tl(lo
, cpu_gpr
[rD(ctx
->opcode
)],
1403 cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)]);
1405 if (unlikely(Rc(ctx
->opcode
) != 0)) {
1406 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
1410 /* mulhdu mulhdu. */
1411 static void gen_mulhdu(DisasContext
*ctx
)
1413 TCGv lo
= tcg_temp_new();
1414 tcg_gen_mulu2_tl(lo
, cpu_gpr
[rD(ctx
->opcode
)],
1415 cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)]);
1417 if (unlikely(Rc(ctx
->opcode
) != 0)) {
1418 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
1423 static void gen_mulld(DisasContext
*ctx
)
1425 tcg_gen_mul_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)],
1426 cpu_gpr
[rB(ctx
->opcode
)]);
1427 if (unlikely(Rc(ctx
->opcode
) != 0))
1428 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
1431 /* mulldo mulldo. */
1432 static void gen_mulldo(DisasContext
*ctx
)
1434 TCGv_i64 t0
= tcg_temp_new_i64();
1435 TCGv_i64 t1
= tcg_temp_new_i64();
1437 tcg_gen_muls2_i64(t0
, t1
, cpu_gpr
[rA(ctx
->opcode
)],
1438 cpu_gpr
[rB(ctx
->opcode
)]);
1439 tcg_gen_mov_i64(cpu_gpr
[rD(ctx
->opcode
)], t0
);
1441 tcg_gen_sari_i64(t0
, t0
, 63);
1442 tcg_gen_setcond_i64(TCG_COND_NE
, cpu_ov
, t0
, t1
);
1443 tcg_gen_or_tl(cpu_so
, cpu_so
, cpu_ov
);
1445 tcg_temp_free_i64(t0
);
1446 tcg_temp_free_i64(t1
);
1448 if (unlikely(Rc(ctx
->opcode
) != 0)) {
1449 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
1454 /* Common subf function */
1455 static inline void gen_op_arith_subf(DisasContext
*ctx
, TCGv ret
, TCGv arg1
,
1456 TCGv arg2
, bool add_ca
, bool compute_ca
,
1457 bool compute_ov
, bool compute_rc0
)
1461 if (compute_ca
|| compute_ov
) {
1462 t0
= tcg_temp_new();
1466 /* dest = ~arg1 + arg2 [+ ca]. */
1467 if (NARROW_MODE(ctx
)) {
1468 /* Caution: a non-obvious corner case of the spec is that we
1469 must produce the *entire* 64-bit addition, but produce the
1470 carry into bit 32. */
1471 TCGv inv1
= tcg_temp_new();
1472 TCGv t1
= tcg_temp_new();
1473 tcg_gen_not_tl(inv1
, arg1
);
1475 tcg_gen_add_tl(t0
, arg2
, cpu_ca
);
1477 tcg_gen_addi_tl(t0
, arg2
, 1);
1479 tcg_gen_xor_tl(t1
, arg2
, inv1
); /* add without carry */
1480 tcg_gen_add_tl(t0
, t0
, inv1
);
1481 tcg_temp_free(inv1
);
1482 tcg_gen_xor_tl(cpu_ca
, t0
, t1
); /* bits changes w/ carry */
1484 tcg_gen_shri_tl(cpu_ca
, cpu_ca
, 32); /* extract bit 32 */
1485 tcg_gen_andi_tl(cpu_ca
, cpu_ca
, 1);
1486 } else if (add_ca
) {
1487 TCGv zero
, inv1
= tcg_temp_new();
1488 tcg_gen_not_tl(inv1
, arg1
);
1489 zero
= tcg_const_tl(0);
1490 tcg_gen_add2_tl(t0
, cpu_ca
, arg2
, zero
, cpu_ca
, zero
);
1491 tcg_gen_add2_tl(t0
, cpu_ca
, t0
, cpu_ca
, inv1
, zero
);
1492 tcg_temp_free(zero
);
1493 tcg_temp_free(inv1
);
1495 tcg_gen_setcond_tl(TCG_COND_GEU
, cpu_ca
, arg2
, arg1
);
1496 tcg_gen_sub_tl(t0
, arg2
, arg1
);
1498 } else if (add_ca
) {
1499 /* Since we're ignoring carry-out, we can simplify the
1500 standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. */
1501 tcg_gen_sub_tl(t0
, arg2
, arg1
);
1502 tcg_gen_add_tl(t0
, t0
, cpu_ca
);
1503 tcg_gen_subi_tl(t0
, t0
, 1);
1505 tcg_gen_sub_tl(t0
, arg2
, arg1
);
1509 gen_op_arith_compute_ov(ctx
, t0
, arg1
, arg2
, 1);
1511 if (unlikely(compute_rc0
)) {
1512 gen_set_Rc0(ctx
, t0
);
1515 if (!TCGV_EQUAL(t0
, ret
)) {
1516 tcg_gen_mov_tl(ret
, t0
);
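/* PowerPC subtract-from is computed as rD = ~rA + rB (+ CA or + 1), so CA
 * acts as a "not borrow" flag: e.g. the plain compute_ca path above just sets
 * CA = (rB >= rA) as an unsigned comparison. */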
/* Sub functions with two operands */
1521 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
1522 static void glue(gen_, name)(DisasContext *ctx) \
1524 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1525 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1526 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1528 /* Sub functions with one operand and one immediate */
1529 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
1530 add_ca, compute_ca, compute_ov) \
1531 static void glue(gen_, name)(DisasContext *ctx) \
1533 TCGv t0 = tcg_const_tl(const_val); \
1534 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
1535 cpu_gpr[rA(ctx->opcode)], t0, \
1536 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1537 tcg_temp_free(t0); \
1539 /* subf subf. subfo subfo. */
1540 GEN_INT_ARITH_SUBF(subf
, 0x01, 0, 0, 0)
1541 GEN_INT_ARITH_SUBF(subfo
, 0x11, 0, 0, 1)
1542 /* subfc subfc. subfco subfco. */
1543 GEN_INT_ARITH_SUBF(subfc
, 0x00, 0, 1, 0)
1544 GEN_INT_ARITH_SUBF(subfco
, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfeo. */
1546 GEN_INT_ARITH_SUBF(subfe
, 0x04, 1, 1, 0)
1547 GEN_INT_ARITH_SUBF(subfeo
, 0x14, 1, 1, 1)
1548 /* subfme subfme. subfmeo subfmeo. */
1549 GEN_INT_ARITH_SUBF_CONST(subfme
, 0x07, -1LL, 1, 1, 0)
1550 GEN_INT_ARITH_SUBF_CONST(subfmeo
, 0x17, -1LL, 1, 1, 1)
1551 /* subfze subfze. subfzeo subfzeo.*/
1552 GEN_INT_ARITH_SUBF_CONST(subfze
, 0x06, 0, 1, 1, 0)
1553 GEN_INT_ARITH_SUBF_CONST(subfzeo
, 0x16, 0, 1, 1, 1)
1556 static void gen_subfic(DisasContext
*ctx
)
1558 TCGv c
= tcg_const_tl(SIMM(ctx
->opcode
));
1559 gen_op_arith_subf(ctx
, cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)],
1564 /* neg neg. nego nego. */
1565 static inline void gen_op_arith_neg(DisasContext
*ctx
, bool compute_ov
)
1567 TCGv zero
= tcg_const_tl(0);
1568 gen_op_arith_subf(ctx
, cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)],
1569 zero
, 0, 0, compute_ov
, Rc(ctx
->opcode
));
1570 tcg_temp_free(zero
);
1573 static void gen_neg(DisasContext
*ctx
)
1575 gen_op_arith_neg(ctx
, 0);
1578 static void gen_nego(DisasContext
*ctx
)
1580 gen_op_arith_neg(ctx
, 1);
1583 /*** Integer logical ***/
1584 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
1585 static void glue(gen_, name)(DisasContext *ctx) \
1587 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
1588 cpu_gpr[rB(ctx->opcode)]); \
1589 if (unlikely(Rc(ctx->opcode) != 0)) \
1590 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1593 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
1594 static void glue(gen_, name)(DisasContext *ctx) \
1596 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
1597 if (unlikely(Rc(ctx->opcode) != 0)) \
1598 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
1602 GEN_LOGICAL2(and, tcg_gen_and_tl
, 0x00, PPC_INTEGER
);
1604 GEN_LOGICAL2(andc
, tcg_gen_andc_tl
, 0x01, PPC_INTEGER
);
1607 static void gen_andi_(DisasContext
*ctx
)
1609 tcg_gen_andi_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)], UIMM(ctx
->opcode
));
1610 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
1614 static void gen_andis_(DisasContext
*ctx
)
1616 tcg_gen_andi_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)], UIMM(ctx
->opcode
) << 16);
1617 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
1621 static void gen_cntlzw(DisasContext
*ctx
)
1623 gen_helper_cntlzw(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)]);
1624 if (unlikely(Rc(ctx
->opcode
) != 0))
1625 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
1629 static void gen_cnttzw(DisasContext
*ctx
)
1631 gen_helper_cnttzw(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)]);
1632 if (unlikely(Rc(ctx
->opcode
) != 0)) {
1633 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
1638 GEN_LOGICAL2(eqv
, tcg_gen_eqv_tl
, 0x08, PPC_INTEGER
);
1639 /* extsb & extsb. */
1640 GEN_LOGICAL1(extsb
, tcg_gen_ext8s_tl
, 0x1D, PPC_INTEGER
);
1641 /* extsh & extsh. */
1642 GEN_LOGICAL1(extsh
, tcg_gen_ext16s_tl
, 0x1C, PPC_INTEGER
);
1644 GEN_LOGICAL2(nand
, tcg_gen_nand_tl
, 0x0E, PPC_INTEGER
);
1646 GEN_LOGICAL2(nor
, tcg_gen_nor_tl
, 0x03, PPC_INTEGER
);
1648 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
1649 static void gen_pause(DisasContext
*ctx
)
1651 TCGv_i32 t0
= tcg_const_i32(0);
1652 tcg_gen_st_i32(t0
, cpu_env
,
1653 -offsetof(PowerPCCPU
, env
) + offsetof(CPUState
, halted
));
1654 tcg_temp_free_i32(t0
);
1656 /* Stop translation, this gives other CPUs a chance to run */
1657 gen_exception_err(ctx
, EXCP_HLT
, 1);
1659 #endif /* defined(TARGET_PPC64) */
1662 static void gen_or(DisasContext
*ctx
)
1666 rs
= rS(ctx
->opcode
);
1667 ra
= rA(ctx
->opcode
);
1668 rb
= rB(ctx
->opcode
);
1669 /* Optimisation for mr. ri case */
1670 if (rs
!= ra
|| rs
!= rb
) {
1672 tcg_gen_or_tl(cpu_gpr
[ra
], cpu_gpr
[rs
], cpu_gpr
[rb
]);
1674 tcg_gen_mov_tl(cpu_gpr
[ra
], cpu_gpr
[rs
]);
1675 if (unlikely(Rc(ctx
->opcode
) != 0))
1676 gen_set_Rc0(ctx
, cpu_gpr
[ra
]);
1677 } else if (unlikely(Rc(ctx
->opcode
) != 0)) {
1678 gen_set_Rc0(ctx
, cpu_gpr
[rs
]);
1679 #if defined(TARGET_PPC64)
1680 } else if (rs
!= 0) { /* 0 is nop */
1685 /* Set process priority to low */
1689 /* Set process priority to medium-low */
1693 /* Set process priority to normal */
1696 #if !defined(CONFIG_USER_ONLY)
1699 /* Set process priority to very low */
/* Set process priority to medium-high */
1711 /* Set process priority to high */
1716 if (ctx
->hv
&& !ctx
->pr
) {
1717 /* Set process priority to very high */
1726 TCGv t0
= tcg_temp_new();
1727 gen_load_spr(t0
, SPR_PPR
);
1728 tcg_gen_andi_tl(t0
, t0
, ~0x001C000000000000ULL
);
1729 tcg_gen_ori_tl(t0
, t0
, ((uint64_t)prio
) << 50);
1730 gen_store_spr(SPR_PPR
, t0
);
1733 #if !defined(CONFIG_USER_ONLY)
1734 /* Pause out of TCG otherwise spin loops with smt_low eat too much
1735 * CPU and the kernel hangs. This applies to all encodings other
1736 * than no-op, e.g., miso(rs=26), yield(27), mdoio(29), mdoom(30),
1737 * and all currently undefined.
1745 GEN_LOGICAL2(orc
, tcg_gen_orc_tl
, 0x0C, PPC_INTEGER
);
1748 static void gen_xor(DisasContext
*ctx
)
1750 /* Optimisation for "set to zero" case */
1751 if (rS(ctx
->opcode
) != rB(ctx
->opcode
))
1752 tcg_gen_xor_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)]);
1754 tcg_gen_movi_tl(cpu_gpr
[rA(ctx
->opcode
)], 0);
1755 if (unlikely(Rc(ctx
->opcode
) != 0))
1756 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
1760 static void gen_ori(DisasContext
*ctx
)
1762 target_ulong uimm
= UIMM(ctx
->opcode
);
1764 if (rS(ctx
->opcode
) == rA(ctx
->opcode
) && uimm
== 0) {
1767 tcg_gen_ori_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)], uimm
);
1771 static void gen_oris(DisasContext
*ctx
)
1773 target_ulong uimm
= UIMM(ctx
->opcode
);
1775 if (rS(ctx
->opcode
) == rA(ctx
->opcode
) && uimm
== 0) {
1779 tcg_gen_ori_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)], uimm
<< 16);
1783 static void gen_xori(DisasContext
*ctx
)
1785 target_ulong uimm
= UIMM(ctx
->opcode
);
1787 if (rS(ctx
->opcode
) == rA(ctx
->opcode
) && uimm
== 0) {
1791 tcg_gen_xori_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)], uimm
);
1795 static void gen_xoris(DisasContext
*ctx
)
1797 target_ulong uimm
= UIMM(ctx
->opcode
);
1799 if (rS(ctx
->opcode
) == rA(ctx
->opcode
) && uimm
== 0) {
1803 tcg_gen_xori_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)], uimm
<< 16);
1806 /* popcntb : PowerPC 2.03 specification */
1807 static void gen_popcntb(DisasContext
*ctx
)
1809 gen_helper_popcntb(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)]);
1812 static void gen_popcntw(DisasContext
*ctx
)
1814 gen_helper_popcntw(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)]);
1817 #if defined(TARGET_PPC64)
1818 /* popcntd: PowerPC 2.06 specification */
1819 static void gen_popcntd(DisasContext
*ctx
)
1821 gen_helper_popcntd(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)]);
1825 /* prtyw: PowerPC 2.05 specification */
1826 static void gen_prtyw(DisasContext
*ctx
)
1828 TCGv ra
= cpu_gpr
[rA(ctx
->opcode
)];
1829 TCGv rs
= cpu_gpr
[rS(ctx
->opcode
)];
1830 TCGv t0
= tcg_temp_new();
1831 tcg_gen_shri_tl(t0
, rs
, 16);
1832 tcg_gen_xor_tl(ra
, rs
, t0
);
1833 tcg_gen_shri_tl(t0
, ra
, 8);
1834 tcg_gen_xor_tl(ra
, ra
, t0
);
1835 tcg_gen_andi_tl(ra
, ra
, (target_ulong
)0x100000001ULL
);
1839 #if defined(TARGET_PPC64)
1840 /* prtyd: PowerPC 2.05 specification */
1841 static void gen_prtyd(DisasContext
*ctx
)
1843 TCGv ra
= cpu_gpr
[rA(ctx
->opcode
)];
1844 TCGv rs
= cpu_gpr
[rS(ctx
->opcode
)];
1845 TCGv t0
= tcg_temp_new();
1846 tcg_gen_shri_tl(t0
, rs
, 32);
1847 tcg_gen_xor_tl(ra
, rs
, t0
);
1848 tcg_gen_shri_tl(t0
, ra
, 16);
1849 tcg_gen_xor_tl(ra
, ra
, t0
);
1850 tcg_gen_shri_tl(t0
, ra
, 8);
1851 tcg_gen_xor_tl(ra
, ra
, t0
);
1852 tcg_gen_andi_tl(ra
, ra
, 1);
1857 #if defined(TARGET_PPC64)
1859 static void gen_bpermd(DisasContext
*ctx
)
1861 gen_helper_bpermd(cpu_gpr
[rA(ctx
->opcode
)],
1862 cpu_gpr
[rS(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)]);
1866 #if defined(TARGET_PPC64)
1867 /* extsw & extsw. */
1868 GEN_LOGICAL1(extsw
, tcg_gen_ext32s_tl
, 0x1E, PPC_64B
);
1871 static void gen_cntlzd(DisasContext
*ctx
)
1873 gen_helper_cntlzd(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)]);
1874 if (unlikely(Rc(ctx
->opcode
) != 0))
1875 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
1879 static void gen_cnttzd(DisasContext
*ctx
)
1881 gen_helper_cnttzd(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)]);
1882 if (unlikely(Rc(ctx
->opcode
) != 0)) {
1883 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
1888 /*** Integer rotate ***/
1890 /* rlwimi & rlwimi. */
1891 static void gen_rlwimi(DisasContext
*ctx
)
1893 TCGv t_ra
= cpu_gpr
[rA(ctx
->opcode
)];
1894 TCGv t_rs
= cpu_gpr
[rS(ctx
->opcode
)];
1895 uint32_t sh
= SH(ctx
->opcode
);
1896 uint32_t mb
= MB(ctx
->opcode
);
1897 uint32_t me
= ME(ctx
->opcode
);
1899 if (sh
== (31-me
) && mb
<= me
) {
1900 tcg_gen_deposit_tl(t_ra
, t_ra
, t_rs
, sh
, me
- mb
+ 1);
1905 #if defined(TARGET_PPC64)
1909 mask
= MASK(mb
, me
);
1911 t1
= tcg_temp_new();
1912 if (mask
<= 0xffffffffu
) {
1913 TCGv_i32 t0
= tcg_temp_new_i32();
1914 tcg_gen_trunc_tl_i32(t0
, t_rs
);
1915 tcg_gen_rotli_i32(t0
, t0
, sh
);
1916 tcg_gen_extu_i32_tl(t1
, t0
);
1917 tcg_temp_free_i32(t0
);
1919 #if defined(TARGET_PPC64)
1920 tcg_gen_deposit_i64(t1
, t_rs
, t_rs
, 32, 32);
1921 tcg_gen_rotli_i64(t1
, t1
, sh
);
1923 g_assert_not_reached();
1927 tcg_gen_andi_tl(t1
, t1
, mask
);
1928 tcg_gen_andi_tl(t_ra
, t_ra
, ~mask
);
1929 tcg_gen_or_tl(t_ra
, t_ra
, t1
);
1932 if (unlikely(Rc(ctx
->opcode
) != 0)) {
1933 gen_set_Rc0(ctx
, t_ra
);
1937 /* rlwinm & rlwinm. */
1938 static void gen_rlwinm(DisasContext
*ctx
)
1940 TCGv t_ra
= cpu_gpr
[rA(ctx
->opcode
)];
1941 TCGv t_rs
= cpu_gpr
[rS(ctx
->opcode
)];
1942 uint32_t sh
= SH(ctx
->opcode
);
1943 uint32_t mb
= MB(ctx
->opcode
);
1944 uint32_t me
= ME(ctx
->opcode
);
1946 if (mb
== 0 && me
== (31 - sh
)) {
1947 tcg_gen_shli_tl(t_ra
, t_rs
, sh
);
1948 tcg_gen_ext32u_tl(t_ra
, t_ra
);
1949 } else if (sh
!= 0 && me
== 31 && sh
== (32 - mb
)) {
1950 tcg_gen_ext32u_tl(t_ra
, t_rs
);
1951 tcg_gen_shri_tl(t_ra
, t_ra
, mb
);
1954 #if defined(TARGET_PPC64)
1958 mask
= MASK(mb
, me
);
1960 if (mask
<= 0xffffffffu
) {
1961 TCGv_i32 t0
= tcg_temp_new_i32();
1962 tcg_gen_trunc_tl_i32(t0
, t_rs
);
1963 tcg_gen_rotli_i32(t0
, t0
, sh
);
1964 tcg_gen_andi_i32(t0
, t0
, mask
);
1965 tcg_gen_extu_i32_tl(t_ra
, t0
);
1966 tcg_temp_free_i32(t0
);
1968 #if defined(TARGET_PPC64)
1969 tcg_gen_deposit_i64(t_ra
, t_rs
, t_rs
, 32, 32);
1970 tcg_gen_rotli_i64(t_ra
, t_ra
, sh
);
1971 tcg_gen_andi_i64(t_ra
, t_ra
, mask
);
1973 g_assert_not_reached();
1977 if (unlikely(Rc(ctx
->opcode
) != 0)) {
1978 gen_set_Rc0(ctx
, t_ra
);
1982 /* rlwnm & rlwnm. */
1983 static void gen_rlwnm(DisasContext
*ctx
)
1985 TCGv t_ra
= cpu_gpr
[rA(ctx
->opcode
)];
1986 TCGv t_rs
= cpu_gpr
[rS(ctx
->opcode
)];
1987 TCGv t_rb
= cpu_gpr
[rB(ctx
->opcode
)];
1988 uint32_t mb
= MB(ctx
->opcode
);
1989 uint32_t me
= ME(ctx
->opcode
);
1992 #if defined(TARGET_PPC64)
1996 mask
= MASK(mb
, me
);
1998 if (mask
<= 0xffffffffu
) {
1999 TCGv_i32 t0
= tcg_temp_new_i32();
2000 TCGv_i32 t1
= tcg_temp_new_i32();
2001 tcg_gen_trunc_tl_i32(t0
, t_rb
);
2002 tcg_gen_trunc_tl_i32(t1
, t_rs
);
2003 tcg_gen_andi_i32(t0
, t0
, 0x1f);
2004 tcg_gen_rotl_i32(t1
, t1
, t0
);
2005 tcg_gen_extu_i32_tl(t_ra
, t1
);
2006 tcg_temp_free_i32(t0
);
2007 tcg_temp_free_i32(t1
);
2009 #if defined(TARGET_PPC64)
2010 TCGv_i64 t0
= tcg_temp_new_i64();
2011 tcg_gen_andi_i64(t0
, t_rb
, 0x1f);
2012 tcg_gen_deposit_i64(t_ra
, t_rs
, t_rs
, 32, 32);
2013 tcg_gen_rotl_i64(t_ra
, t_ra
, t0
);
2014 tcg_temp_free_i64(t0
);
2016 g_assert_not_reached();
2020 tcg_gen_andi_tl(t_ra
, t_ra
, mask
);
2022 if (unlikely(Rc(ctx
->opcode
) != 0)) {
2023 gen_set_Rc0(ctx
, t_ra
);
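/* All three rlw* forms are rotate-left-then-mask operations on the low 32
 * bits of rS: rlwinm/rlwnm keep only the bits selected by the mb:me mask,
 * while rlwimi merges them into rA, preserving rA outside the mask; when the
 * mask is a single contiguous field in the right place the generic rotate is
 * replaced by a single shift or deposit, as in the fast paths above. */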
2027 #if defined(TARGET_PPC64)
2028 #define GEN_PPC64_R2(name, opc1, opc2) \
2029 static void glue(gen_, name##0)(DisasContext *ctx) \
2031 gen_##name(ctx, 0); \
2034 static void glue(gen_, name##1)(DisasContext *ctx) \
2036 gen_##name(ctx, 1); \
2038 #define GEN_PPC64_R4(name, opc1, opc2) \
2039 static void glue(gen_, name##0)(DisasContext *ctx) \
2041 gen_##name(ctx, 0, 0); \
2044 static void glue(gen_, name##1)(DisasContext *ctx) \
2046 gen_##name(ctx, 0, 1); \
2049 static void glue(gen_, name##2)(DisasContext *ctx) \
2051 gen_##name(ctx, 1, 0); \
2054 static void glue(gen_, name##3)(DisasContext *ctx) \
2056 gen_##name(ctx, 1, 1); \
2059 static void gen_rldinm(DisasContext
*ctx
, int mb
, int me
, int sh
)
2061 TCGv t_ra
= cpu_gpr
[rA(ctx
->opcode
)];
2062 TCGv t_rs
= cpu_gpr
[rS(ctx
->opcode
)];
2064 if (sh
!= 0 && mb
== 0 && me
== (63 - sh
)) {
2065 tcg_gen_shli_tl(t_ra
, t_rs
, sh
);
2066 } else if (sh
!= 0 && me
== 63 && sh
== (64 - mb
)) {
2067 tcg_gen_shri_tl(t_ra
, t_rs
, mb
);
2069 tcg_gen_rotli_tl(t_ra
, t_rs
, sh
);
2070 tcg_gen_andi_tl(t_ra
, t_ra
, MASK(mb
, me
));
2072 if (unlikely(Rc(ctx
->opcode
) != 0)) {
2073 gen_set_Rc0(ctx
, t_ra
);
2077 /* rldicl - rldicl. */
2078 static inline void gen_rldicl(DisasContext
*ctx
, int mbn
, int shn
)
2082 sh
= SH(ctx
->opcode
) | (shn
<< 5);
2083 mb
= MB(ctx
->opcode
) | (mbn
<< 5);
2084 gen_rldinm(ctx
, mb
, 63, sh
);
2086 GEN_PPC64_R4(rldicl
, 0x1E, 0x00);
2088 /* rldicr - rldicr. */
2089 static inline void gen_rldicr(DisasContext
*ctx
, int men
, int shn
)
2093 sh
= SH(ctx
->opcode
) | (shn
<< 5);
2094 me
= MB(ctx
->opcode
) | (men
<< 5);
2095 gen_rldinm(ctx
, 0, me
, sh
);
2097 GEN_PPC64_R4(rldicr
, 0x1E, 0x02);
2099 /* rldic - rldic. */
2100 static inline void gen_rldic(DisasContext
*ctx
, int mbn
, int shn
)
2104 sh
= SH(ctx
->opcode
) | (shn
<< 5);
2105 mb
= MB(ctx
->opcode
) | (mbn
<< 5);
2106 gen_rldinm(ctx
, mb
, 63 - sh
, sh
);
2108 GEN_PPC64_R4(rldic
, 0x1E, 0x04);
2110 static void gen_rldnm(DisasContext
*ctx
, int mb
, int me
)
2112 TCGv t_ra
= cpu_gpr
[rA(ctx
->opcode
)];
2113 TCGv t_rs
= cpu_gpr
[rS(ctx
->opcode
)];
2114 TCGv t_rb
= cpu_gpr
[rB(ctx
->opcode
)];
2117 t0
= tcg_temp_new();
2118 tcg_gen_andi_tl(t0
, t_rb
, 0x3f);
2119 tcg_gen_rotl_tl(t_ra
, t_rs
, t0
);
2122 tcg_gen_andi_tl(t_ra
, t_ra
, MASK(mb
, me
));
2123 if (unlikely(Rc(ctx
->opcode
) != 0)) {
2124 gen_set_Rc0(ctx
, t_ra
);
2128 /* rldcl - rldcl. */
2129 static inline void gen_rldcl(DisasContext
*ctx
, int mbn
)
2133 mb
= MB(ctx
->opcode
) | (mbn
<< 5);
2134 gen_rldnm(ctx
, mb
, 63);
2136 GEN_PPC64_R2(rldcl
, 0x1E, 0x08);
2138 /* rldcr - rldcr. */
2139 static inline void gen_rldcr(DisasContext
*ctx
, int men
)
2143 me
= MB(ctx
->opcode
) | (men
<< 5);
2144 gen_rldnm(ctx
, 0, me
);
2146 GEN_PPC64_R2(rldcr
, 0x1E, 0x09);
2148 /* rldimi - rldimi. */
2149 static void gen_rldimi(DisasContext
*ctx
, int mbn
, int shn
)
2151 TCGv t_ra
= cpu_gpr
[rA(ctx
->opcode
)];
2152 TCGv t_rs
= cpu_gpr
[rS(ctx
->opcode
)];
2153 uint32_t sh
= SH(ctx
->opcode
) | (shn
<< 5);
2154 uint32_t mb
= MB(ctx
->opcode
) | (mbn
<< 5);
2155 uint32_t me
= 63 - sh
;
2158 tcg_gen_deposit_tl(t_ra
, t_ra
, t_rs
, sh
, me
- mb
+ 1);
2160 target_ulong mask
= MASK(mb
, me
);
2161 TCGv t1
= tcg_temp_new();
2163 tcg_gen_rotli_tl(t1
, t_rs
, sh
);
2164 tcg_gen_andi_tl(t1
, t1
, mask
);
2165 tcg_gen_andi_tl(t_ra
, t_ra
, ~mask
);
2166 tcg_gen_or_tl(t_ra
, t_ra
, t1
);
2169 if (unlikely(Rc(ctx
->opcode
) != 0)) {
2170 gen_set_Rc0(ctx
, t_ra
);
2173 GEN_PPC64_R4(rldimi
, 0x1E, 0x06);
2176 /*** Integer shift ***/
2179 static void gen_slw(DisasContext
*ctx
)
2183 t0
= tcg_temp_new();
2184 /* AND rS with a mask that is 0 when rB >= 0x20 */
2185 #if defined(TARGET_PPC64)
2186 tcg_gen_shli_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x3a);
2187 tcg_gen_sari_tl(t0
, t0
, 0x3f);
2189 tcg_gen_shli_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x1a);
2190 tcg_gen_sari_tl(t0
, t0
, 0x1f);
2192 tcg_gen_andc_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t0
);
2193 t1
= tcg_temp_new();
2194 tcg_gen_andi_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], 0x1f);
2195 tcg_gen_shl_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
2198 tcg_gen_ext32u_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
2199 if (unlikely(Rc(ctx
->opcode
) != 0))
2200 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
2204 static void gen_sraw(DisasContext
*ctx
)
2206 gen_helper_sraw(cpu_gpr
[rA(ctx
->opcode
)], cpu_env
,
2207 cpu_gpr
[rS(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)]);
2208 if (unlikely(Rc(ctx
->opcode
) != 0))
2209 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
2212 /* srawi & srawi. */
2213 static void gen_srawi(DisasContext
*ctx
)
2215 int sh
= SH(ctx
->opcode
);
2216 TCGv dst
= cpu_gpr
[rA(ctx
->opcode
)];
2217 TCGv src
= cpu_gpr
[rS(ctx
->opcode
)];
2219 tcg_gen_ext32s_tl(dst
, src
);
2220 tcg_gen_movi_tl(cpu_ca
, 0);
2223 tcg_gen_ext32s_tl(dst
, src
);
2224 tcg_gen_andi_tl(cpu_ca
, dst
, (1ULL << sh
) - 1);
2225 t0
= tcg_temp_new();
2226 tcg_gen_sari_tl(t0
, dst
, TARGET_LONG_BITS
- 1);
2227 tcg_gen_and_tl(cpu_ca
, cpu_ca
, t0
);
2229 tcg_gen_setcondi_tl(TCG_COND_NE
, cpu_ca
, cpu_ca
, 0);
2230 tcg_gen_sari_tl(dst
, dst
, sh
);
2232 if (unlikely(Rc(ctx
->opcode
) != 0)) {
2233 gen_set_Rc0(ctx
, dst
);
2238 static void gen_srw(DisasContext
*ctx
)
2242 t0
= tcg_temp_new();
2243 /* AND rS with a mask that is 0 when rB >= 0x20 */
2244 #if defined(TARGET_PPC64)
2245 tcg_gen_shli_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x3a);
2246 tcg_gen_sari_tl(t0
, t0
, 0x3f);
2248 tcg_gen_shli_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x1a);
2249 tcg_gen_sari_tl(t0
, t0
, 0x1f);
2251 tcg_gen_andc_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t0
);
2252 tcg_gen_ext32u_tl(t0
, t0
);
2253 t1
= tcg_temp_new();
2254 tcg_gen_andi_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], 0x1f);
2255 tcg_gen_shr_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
2258 if (unlikely(Rc(ctx
->opcode
) != 0))
2259 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
#if defined(TARGET_PPC64)
static void gen_sld(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

static void gen_srad(DisasContext *ctx)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sradi & sradi. */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
    } else {
        TCGv t0;
        tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        tcg_gen_sari_tl(dst, src, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}

static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}
static void gen_srd(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif
#if defined(TARGET_PPC64)
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
    tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
    tcg_temp_free_i32(tmp);
}
#else
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
}
#endif

/*** Floating-Point arithmetic ***/
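/*
 * The macros below all expand to the same skeleton: check that the FPU is
 * enabled (raising POWERPC_EXCP_FPU otherwise), update NIP so a memory
 * fault inside the helper reports the right address, reset the saved
 * float status, call the per-instruction helper, optionally round the
 * result to single precision (the "s" forms), then update FPRF and/or CR1
 * as requested by the macro arguments.
 */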
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                     cpu_fpr[rA(ctx->opcode)], \
                     cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
    if (isfloat) { \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                        cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (set_fprf) { \
        gen_compute_fprf(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                     cpu_fpr[rA(ctx->opcode)], \
                     cpu_fpr[rB(ctx->opcode)]); \
    if (isfloat) { \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                        cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (set_fprf) { \
        gen_compute_fprf(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                     cpu_fpr[rA(ctx->opcode)], \
                     cpu_fpr[rC(ctx->opcode)]); \
    if (isfloat) { \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                        cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (set_fprf) { \
        gen_compute_fprf(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                       cpu_fpr[rB(ctx->opcode)]); \
    if (set_fprf) { \
        gen_compute_fprf(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}

#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                       cpu_fpr[rB(ctx->opcode)]); \
    if (set_fprf) { \
        gen_compute_fprf(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
static void gen_frsqrtes(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_env,
                       cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
                    cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);

GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
static void gen_fsqrt(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
                     cpu_fpr[rB(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

static void gen_fsqrts(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
                     cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
                    cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
}
/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);

GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
static void gen_ftdiv(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
                     cpu_fpr[rB(ctx->opcode)]);
}

static void gen_ftsqrt(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
}
/*** Floating-Point compare ***/

static void gen_fcmpo(DisasContext *ctx)
{
    TCGv_i32 crf;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    gen_helper_fcmpo(cpu_env, cpu_fpr[rA(ctx->opcode)],
                     cpu_fpr[rB(ctx->opcode)], crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
}

static void gen_fcmpu(DisasContext *ctx)
{
    TCGv_i32 crf;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    gen_helper_fcmpu(cpu_env, cpu_fpr[rA(ctx->opcode)],
                     cpu_fpr[rB(ctx->opcode)], crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
}

/*** Floating-point move ***/
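/*
 * The move-class instructions below operate directly on the 64-bit
 * register image: fabs/fnabs/fneg just clear, set or flip bit 63 (the
 * sign bit), and fcpsgn/fmrgew/fmrgow are expressed as deposits, so none
 * of them go through the softfloat helpers or touch the FPSCR.
 */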
/* XXX: beware that fabs never checks for NaNs nor update FPSCR */
static void gen_fabs(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_andi_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
                     ~(1ULL << 63));
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

/* XXX: beware that fmr never checks for NaNs nor update FPSCR */
static void gen_fmr(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

/* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
static void gen_fnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_ori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
                    1ULL << 63);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

/* XXX: beware that fneg never checks for NaNs nor update FPSCR */
static void gen_fneg(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_xori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
                     1ULL << 63);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}
/* fcpsgn: PowerPC 2.05 specification */
/* XXX: beware that fcpsgn never checks for NaNs nor update FPSCR */
static void gen_fcpsgn(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
                        cpu_fpr[rB(ctx->opcode)], 0, 63);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}
static void gen_fmrgew(DisasContext *ctx)
{
    TCGv_i64 b0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    b0 = tcg_temp_new_i64();
    tcg_gen_shri_i64(b0, cpu_fpr[rB(ctx->opcode)], 32);
    tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
                        b0, 0, 32);
    tcg_temp_free_i64(b0);
}

static void gen_fmrgow(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)],
                        cpu_fpr[rB(ctx->opcode)],
                        cpu_fpr[rA(ctx->opcode)],
                        32, 32);
}
/*** Floating-Point status & ctrl register ***/

static void gen_mcrfs(DisasContext *ctx)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmask;
    TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
    int bfa;
    int nibble;
    int shift;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bfa = crfS(ctx->opcode);
    nibble = 7 - bfa;
    shift = 4 * nibble;
    tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
    tcg_temp_free(tmp);
    tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
    /* Only the exception bits (including FX) should be cleared if read */
    tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
                     ~((0xF << shift) & FP_EX_CLEAR_BITS));
    /* FEX and VX need to be updated, so don't set fpscr directly */
    tmask = tcg_const_i32(1 << nibble);
    gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
    tcg_temp_free_i32(tmask);
    tcg_temp_free_i64(tnew_fpscr);
}
static void gen_mffs(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from an helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from an helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    gen_helper_store_fpscr(cpu_env, cpu_fpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}
/*** Addressing modes ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
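/*
 * All effective-address computations funnel through the helpers below.
 * In narrow (32-bit) mode the result is zero-extended after every add or
 * move so the EA wraps at 4GB, and a zero rA field means "use 0" rather
 * than r0, as the architecture requires.
 */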
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}
static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        }
    } else {
        tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    }
}
static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        tcg_gen_movi_tl(EA, 0);
    } else if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    } else {
        tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    }
}

static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                target_long val)
{
    tcg_gen_addi_tl(ret, arg1, val);
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(ret, ret);
    }
}
static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
{
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1, t2;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    tcg_gen_andi_tl(t0, EA, mask);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
    t2 = tcg_const_i32(0);
    gen_helper_raise_exception_err(cpu_env, t1, t2);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    gen_set_label(l1);
    tcg_temp_free(t0);
}
/*** Integer load ***/
static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
}

static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_UW | ctx->default_tcg_memop_mask;
    tcg_gen_qemu_ld_tl(arg1, arg2, ctx->mem_idx, op);
}

static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_SW | ctx->default_tcg_memop_mask;
    tcg_gen_qemu_ld_tl(arg1, arg2, ctx->mem_idx, op);
}

static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_UL | ctx->default_tcg_memop_mask;
    tcg_gen_qemu_ld_tl(arg1, arg2, ctx->mem_idx, op);
}

static void gen_qemu_ld32u_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
{
    TCGv tmp = tcg_temp_new();
    gen_qemu_ld32u(ctx, tmp, addr);
    tcg_gen_extu_tl_i64(val, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_SL | ctx->default_tcg_memop_mask;
    tcg_gen_qemu_ld_tl(arg1, arg2, ctx->mem_idx, op);
}

static void gen_qemu_ld32s_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
{
    TCGv tmp = tcg_temp_new();
    gen_qemu_ld32s(ctx, tmp, addr);
    tcg_gen_ext_tl_i64(val, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGMemOp op = MO_Q | ctx->default_tcg_memop_mask;
    tcg_gen_qemu_ld_i64(arg1, arg2, ctx->mem_idx, op);
}

static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
}

static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_UW | ctx->default_tcg_memop_mask;
    tcg_gen_qemu_st_tl(arg1, arg2, ctx->mem_idx, op);
}

static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_UL | ctx->default_tcg_memop_mask;
    tcg_gen_qemu_st_tl(arg1, arg2, ctx->mem_idx, op);
}

static void gen_qemu_st32_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_trunc_i64_tl(tmp, val);
    gen_qemu_st32(ctx, tmp, addr);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGMemOp op = MO_Q | ctx->default_tcg_memop_mask;
    tcg_gen_qemu_st_i64(arg1, arg2, ctx->mem_idx, op);
}
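/*
 * The integer load patterns are generated from the macros below: GEN_LD
 * is the D-form, GEN_LDU/GEN_LDUX are the update forms (which must reject
 * rA == 0 and rA == rD), and GEN_LDX_E is the X-form with an optional
 * privilege check supplied through its last argument (CHK_NONE, or
 * CHK_HVRM for the cache-inhibited variants).
 */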
#define GEN_LD(name, ldop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDU(name, ldop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(rA(ctx->opcode) == 0 || \
                 rA(ctx->opcode) == rD(ctx->opcode))) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    if (type == PPC_64B) \
        gen_addr_imm_index(ctx, EA, 0x03); \
    else \
        gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDUX(name, ldop, opc2, opc3, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(rA(ctx->opcode) == 0 || \
                 rA(ctx->opcode) == rD(ctx->opcode))) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    chk; \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDX(name, ldop, opc2, opc3, type) \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)

#define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

#define GEN_LDS(name, ldop, op, type) \
GEN_LD(name, ldop, op | 0x20, type); \
GEN_LDU(name, ldop, op | 0x21, type); \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)
/* lbz lbzu lbzux lbzx */
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
/* lha lhau lhaux lhax */
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
/* lhz lhzu lhzux lhzx */
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
/* lwz lwzu lwzux lwzx */
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
#if defined(TARGET_PPC64)
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);

/* CI load/store variants */
GEN_LDX_HVRM(ldcix, ld64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
static void gen_ld(DisasContext *ctx)
{
    TCGv EA;
    if (Rc(ctx->opcode)) {
        if (unlikely(rA(ctx->opcode) == 0 ||
                     rA(ctx->opcode) == rD(ctx->opcode))) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0x03);
    if (ctx->opcode & 0x02) {
        /* lwa (lwau is undefined) */
        gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
    }
    if (Rc(ctx->opcode))
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
    tcg_temp_free(EA);
}
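/*
 * lq loads a quadword into an even/odd GPR pair.  Only the order of the
 * two 64-bit halves depends on the guest endianness here; gen_qemu_ld64
 * already performs the per-doubleword byteswap.
 */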
static void gen_lq(DisasContext *ctx)
{
    int ra, rd;
    TCGv EA;

    /* lq is a legal user mode instruction starting in ISA 2.07 */
    bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
    bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;

    if (!legal_in_user_mode && ctx->pr) {
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }

    if (!le_is_supported && ctx->le_mode) {
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
        return;
    }
    ra = rA(ctx->opcode);
    rd = rD(ctx->opcode);
    if (unlikely((rd & 1) || rd == ra)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0x0F);

    /* We only need to swap high and low halves. gen_qemu_ld64 does necessary
       64-bit byteswap already. */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64(ctx, cpu_gpr[rd + 1], EA);
        gen_addr_add(ctx, EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
    } else {
        gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
        gen_addr_add(ctx, EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_gpr[rd + 1], EA);
    }
    tcg_temp_free(EA);
}
/*** Integer store ***/
#define GEN_ST(name, stop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STU(name, stop, opc, type) \
static void glue(gen_, stop##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    if (type == PPC_64B) \
        gen_addr_imm_index(ctx, EA, 0x03); \
    else \
        gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STUX(name, stop, opc2, opc3, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    chk; \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}
#define GEN_STX(name, stop, opc2, opc3, type) \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)

#define GEN_STX_HVRM(name, stop, opc2, opc3, type) \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

#define GEN_STS(name, stop, op, type) \
GEN_ST(name, stop, op | 0x20, type); \
GEN_STU(name, stop, op | 0x21, type); \
GEN_STUX(name, stop, 0x17, op | 0x01, type); \
GEN_STX(name, stop, 0x17, op | 0x00, type)
/* stb stbu stbux stbx */
GEN_STS(stb, st8, 0x06, PPC_INTEGER);
/* sth sthu sthux sthx */
GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
/* stw stwu stwux stwx */
GEN_STS(stw, st32, 0x04, PPC_INTEGER);
#if defined(TARGET_PPC64)
GEN_STUX(std, st64, 0x15, 0x05, PPC_64B);
GEN_STX(std, st64, 0x15, 0x04, PPC_64B);
GEN_STX_HVRM(stdcix, st64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
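/*
 * gen_std decodes DS-form std, stdu and stq from the same primary opcode:
 * the low two opcode bits select stq (0x2) versus std/stdu, and for the
 * update form rA must be non-zero.
 */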
static void gen_std(DisasContext *ctx)
{
    int rs;
    TCGv EA;

    rs = rS(ctx->opcode);
    if ((ctx->opcode & 0x3) == 0x2) { /* stq */
        bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
        bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;

        if (!(ctx->insns_flags & PPC_64BX)) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }

        if (!legal_in_user_mode && ctx->pr) {
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
            return;
        }

        if (!le_is_supported && ctx->le_mode) {
            gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
            return;
        }

        if (unlikely(rs & 1)) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }
        gen_set_access_type(ctx, ACCESS_INT);
        EA = tcg_temp_new();
        gen_addr_imm_index(ctx, EA, 0x03);

        /* We only need to swap high and low halves. gen_qemu_st64 does
           necessary 64-bit byteswap already. */
        if (unlikely(ctx->le_mode)) {
            gen_qemu_st64(ctx, cpu_gpr[rs + 1], EA);
            gen_addr_add(ctx, EA, EA, 8);
            gen_qemu_st64(ctx, cpu_gpr[rs], EA);
        } else {
            gen_qemu_st64(ctx, cpu_gpr[rs], EA);
            gen_addr_add(ctx, EA, EA, 8);
            gen_qemu_st64(ctx, cpu_gpr[rs + 1], EA);
        }
        tcg_temp_free(EA);
    } else {
        /* std / stdu */
        if (Rc(ctx->opcode)) {
            if (unlikely(rA(ctx->opcode) == 0)) {
                gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
                return;
            }
        }
        gen_set_access_type(ctx, ACCESS_INT);
        EA = tcg_temp_new();
        gen_addr_imm_index(ctx, EA, 0x03);
        gen_qemu_st64(ctx, cpu_gpr[rs], EA);
        if (Rc(ctx->opcode))
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
        tcg_temp_free(EA);
    }
}
/*** Integer load and store with byte reverse ***/

static inline void gen_qemu_ld16ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_UW | (ctx->default_tcg_memop_mask ^ MO_BSWAP);
    tcg_gen_qemu_ld_tl(arg1, arg2, ctx->mem_idx, op);
}
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);

static inline void gen_qemu_ld32ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_UL | (ctx->default_tcg_memop_mask ^ MO_BSWAP);
    tcg_gen_qemu_ld_tl(arg1, arg2, ctx->mem_idx, op);
}
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);

#if defined(TARGET_PPC64)
static inline void gen_qemu_ld64ur(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_Q | (ctx->default_tcg_memop_mask ^ MO_BSWAP);
    tcg_gen_qemu_ld_i64(arg1, arg2, ctx->mem_idx, op);
}
GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif  /* TARGET_PPC64 */

static inline void gen_qemu_st16r(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_UW | (ctx->default_tcg_memop_mask ^ MO_BSWAP);
    tcg_gen_qemu_st_tl(arg1, arg2, ctx->mem_idx, op);
}
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);

static inline void gen_qemu_st32r(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_UL | (ctx->default_tcg_memop_mask ^ MO_BSWAP);
    tcg_gen_qemu_st_tl(arg1, arg2, ctx->mem_idx, op);
}
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);

#if defined(TARGET_PPC64)
static inline void gen_qemu_st64r(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    TCGMemOp op = MO_Q | (ctx->default_tcg_memop_mask ^ MO_BSWAP);
    tcg_gen_qemu_st_i64(arg1, arg2, ctx->mem_idx, op);
}
GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif  /* TARGET_PPC64 */
/*** Integer load and store multiple ***/

static void gen_lmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rD(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_lmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

static void gen_stmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
/*** Integer load and store strings ***/

/* The PowerPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * On the other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
static void gen_lswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);
    int start = rD(ctx->opcode);
    int ra = rA(ctx->opcode);
    int nr;

    if (nb == 0)
        nb = 32;
    nr = (nb + 3) / 4;
    if (unlikely(lsw_reg_in_range(start, nr, ra))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(start);
    gen_helper_lsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
static void gen_lswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2, t3;
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_const_i32(rD(ctx->opcode));
    t2 = tcg_const_i32(rA(ctx->opcode));
    t3 = tcg_const_i32(rB(ctx->opcode));
    gen_helper_lswx(cpu_env, t0, t1, t2, t3);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
}
static void gen_stswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    if (nb == 0)
        nb = 32;
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
static void gen_stswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    gen_set_access_type(ctx, ACCESS_INT);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t1, cpu_xer);
    tcg_gen_andi_i32(t1, t1, 0x7F);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
/*** Memory synchronisation ***/
/* eieio */
static void gen_eieio(DisasContext *ctx)
{
}

#if !defined(CONFIG_USER_ONLY)
static inline void gen_check_tlb_flush(DisasContext *ctx)
{
    TCGv_i32 t;
    TCGLabel *l;

    if (!ctx->lazy_tlb_flush) {
        return;
    }
    l = gen_new_label();
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
    gen_helper_check_tlb_flush(cpu_env);
    gen_set_label(l);
    tcg_temp_free_i32(t);
}
#else
static inline void gen_check_tlb_flush(DisasContext *ctx) { }
#endif
/* isync */
static void gen_isync(DisasContext *ctx)
{
    /*
     * We need to check for a pending TLB flush. This can only happen in
     * kernel mode however so check MSR_PR
     */
    if (!ctx->pr) {
        gen_check_tlb_flush(ctx);
    }
    gen_stop_exception(ctx);
}
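/*
 * Load-reserve / store-conditional: the LARX pattern records the
 * reservation address in cpu_reserve and the loaded value in
 * env->reserve_val; the matching store-conditional (gen_conditional_store
 * below) only succeeds when the effective address still matches the
 * reservation, and always clears the reservation afterwards.
 */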
#define LARX(name, len, loadop) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv t0; \
    TCGv gpr = cpu_gpr[rD(ctx->opcode)]; \
    gen_set_access_type(ctx, ACCESS_RES); \
    t0 = tcg_temp_local_new(); \
    gen_addr_reg_index(ctx, t0); \
    if ((len) > 1) { \
        gen_check_align(ctx, t0, (len)-1); \
    } \
    gen_qemu_##loadop(ctx, gpr, t0); \
    tcg_gen_mov_tl(cpu_reserve, t0); \
    tcg_gen_st_tl(gpr, cpu_env, offsetof(CPUPPCState, reserve_val)); \
    tcg_temp_free(t0); \
}
LARX(lbarx, 1, ld8u);
LARX(lharx, 2, ld16u);
LARX(lwarx, 4, ld32u);
#if defined(CONFIG_USER_ONLY)
static void gen_conditional_store(DisasContext *ctx, TCGv EA,
                                  int reg, int size)
{
    TCGv t0 = tcg_temp_new();
    uint32_t save_exception = ctx->exception;

    tcg_gen_st_tl(EA, cpu_env, offsetof(CPUPPCState, reserve_ea));
    tcg_gen_movi_tl(t0, (size << 5) | reg);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, reserve_info));
    tcg_temp_free(t0);
    gen_update_nip(ctx, ctx->nip - 4);
    ctx->exception = POWERPC_EXCP_BRANCH;
    gen_exception(ctx, POWERPC_EXCP_STCX);
    ctx->exception = save_exception;
}
#else
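/*
 * The CONFIG_USER_ONLY variant above cannot compare against the
 * reservation atomically, so it stashes the EA plus the register/size
 * pair in the CPU state and raises POWERPC_EXCP_STCX, letting the stcx.
 * be completed in the exception path.  The softmmu variant below performs
 * the check inline.
 */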
static void gen_conditional_store(DisasContext *ctx, TCGv EA,
                                  int reg, int size)
{
    TCGLabel *l1;

    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, l1);
    tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
#if defined(TARGET_PPC64)
    if (size == 8) {
        gen_qemu_st64(ctx, cpu_gpr[reg], EA);
    } else
#endif
    if (size == 4) {
        gen_qemu_st32(ctx, cpu_gpr[reg], EA);
    } else if (size == 2) {
        gen_qemu_st16(ctx, cpu_gpr[reg], EA);
#if defined(TARGET_PPC64)
    } else if (size == 16) {
        TCGv gpr1, gpr2, EA8;
        if (unlikely(ctx->le_mode)) {
            gpr1 = cpu_gpr[reg + 1];
            gpr2 = cpu_gpr[reg];
        } else {
            gpr1 = cpu_gpr[reg];
            gpr2 = cpu_gpr[reg + 1];
        }
        gen_qemu_st64(ctx, gpr1, EA);
        EA8 = tcg_temp_local_new();
        gen_addr_add(ctx, EA8, EA, 8);
        gen_qemu_st64(ctx, gpr2, EA8);
        tcg_temp_free(EA8);
#endif
    } else {
        gen_qemu_st8(ctx, cpu_gpr[reg], EA);
    }
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_reserve, -1);
}
#endif
#define STCX(name, len) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv t0; \
    if (unlikely((len == 16) && (rD(ctx->opcode) & 1))) { \
        gen_inval_exception(ctx, \
                            POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_RES); \
    t0 = tcg_temp_local_new(); \
    gen_addr_reg_index(ctx, t0); \
    if (len > 1) { \
        gen_check_align(ctx, t0, (len)-1); \
    } \
    gen_conditional_store(ctx, t0, rS(ctx->opcode), len); \
    tcg_temp_free(t0); \
}

STCX(stbcx_, 1);
STCX(sthcx_, 2);
STCX(stwcx_, 4);

#if defined(TARGET_PPC64)
/* ldarx */
LARX(ldarx, 8, ld64);
/* lqarx */
static void gen_lqarx(DisasContext *ctx)
{
    TCGv EA;
    int rd = rD(ctx->opcode);
    TCGv gpr1, gpr2;

    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_local_new();
    gen_addr_reg_index(ctx, EA);
    gen_check_align(ctx, EA, 15);
    if (unlikely(ctx->le_mode)) {
        gpr1 = cpu_gpr[rd + 1];
        gpr2 = cpu_gpr[rd];
    } else {
        gpr1 = cpu_gpr[rd];
        gpr2 = cpu_gpr[rd + 1];
    }
    gen_qemu_ld64(ctx, gpr1, EA);
    tcg_gen_mov_tl(cpu_reserve, EA);

    gen_addr_add(ctx, EA, EA, 8);
    gen_qemu_ld64(ctx, gpr2, EA);

    tcg_gen_st_tl(gpr1, cpu_env, offsetof(CPUPPCState, reserve_val));
    tcg_gen_st_tl(gpr2, cpu_env, offsetof(CPUPPCState, reserve_val2));

    tcg_temp_free(EA);
}

/* stqcx. */
STCX(stqcx_, 16);

#endif /* defined(TARGET_PPC64) */
/* sync */
static void gen_sync(DisasContext *ctx)
{
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode however so
     * check MSR_PR as well.
     */
    if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
        gen_check_tlb_flush(ctx);
    }
}
/* wait */
static void gen_wait(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(1);
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_err(ctx, EXCP_HLT, 1);
}
#if defined(TARGET_PPC64)
static void gen_doze(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_DOZE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_nap(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_NAP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_sleep(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_SLEEP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rvwinkle(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_RVWINKLE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}
#endif /* #if defined(TARGET_PPC64) */
/*** Floating-point load ***/
#define GEN_LDF(name, ldop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDUF(name, ldop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDUXF(name, ldop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDXF(name, ldop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDFS(name, ldop, op, type) \
GEN_LDF(name, ldop, op | 0x20, type); \
GEN_LDUF(name, ldop, op | 0x21, type); \
GEN_LDUXF(name, ldop, op | 0x01, type); \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
static inline void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1 = tcg_temp_new_i32();
    gen_qemu_ld32u(ctx, t0, arg2);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_temp_free(t0);
    gen_helper_float32_to_float64(arg1, cpu_env, t1);
    tcg_temp_free_i32(t1);
}

/* lfd lfdu lfdux lfdx */
GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT);
/* lfs lfsu lfsux lfsx */
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
static void gen_lfdp(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    /* We only need to swap high and low halves. gen_qemu_ld64 does necessary
       64-bit byteswap already. */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}
static void gen_lfdpx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    /* We only need to swap high and low halves. gen_qemu_ld64 does necessary
       64-bit byteswap already. */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}
static void gen_lfiwax(DisasContext *ctx)
{
    TCGv EA;
    TCGv t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32s(ctx, t0, EA);
    tcg_gen_ext_tl_i64(cpu_fpr[rD(ctx->opcode)], t0);
    tcg_temp_free(EA);
    tcg_temp_free(t0);
}
static void gen_lfiwzx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
}
/*** Floating-point store ***/
#define GEN_STF(name, stop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STUF(name, stop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STUXF(name, stop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STXF(name, stop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STFS(name, stop, op, type) \
GEN_STF(name, stop, op | 0x20, type); \
GEN_STUF(name, stop, op | 0x21, type); \
GEN_STUXF(name, stop, op | 0x01, type); \
GEN_STXF(name, stop, 0x17, op | 0x00, type)
static inline void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv t1 = tcg_temp_new();
    gen_helper_float64_to_float32(t0, cpu_env, arg1);
    tcg_gen_extu_i32_tl(t1, t0);
    tcg_temp_free_i32(t0);
    gen_qemu_st32(ctx, t1, arg2);
    tcg_temp_free(t1);
}

/* stfd stfdu stfdux stfdx */
GEN_STFS(stfd, st64, 0x16, PPC_FLOAT);
/* stfs stfsu stfsux stfsx */
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
static void gen_stfdp(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    /* We only need to swap high and low halves. gen_qemu_st64 does necessary
       64-bit byteswap already. */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}
static void gen_stfdpx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    /* We only need to swap high and low halves. gen_qemu_st64 does necessary
       64-bit byteswap already. */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_st64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_trunc_i64_tl(t0, arg1);
    gen_qemu_st32(ctx, t0, arg2);
    tcg_temp_free(t0);
}
/* stfiwx */
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->has_cfar)
        tcg_gen_movi_tl(cpu_cfar, nip);
#endif
}

static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    if (unlikely(ctx->singlestep_enabled)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
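/*
 * Direct translation-block chaining is only used when the destination
 * lies in the same guest page as the current TB and single-stepping is
 * off (see use_goto_tb above); otherwise NIP is written and the TB is
 * exited normally so breakpoints and trace exceptions are honoured.
 */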
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (NARROW_MODE(ctx)) {
        dest = (uint32_t) dest;
    }
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        if (unlikely(ctx->singlestep_enabled)) {
            if ((ctx->singlestep_enabled &
                (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
                (ctx->exception == POWERPC_EXCP_BRANCH ||
                 ctx->exception == POWERPC_EXCP_TRACE)) {
                target_ulong tmp = ctx->nip;
                ctx->nip = dest;
                gen_exception(ctx, POWERPC_EXCP_TRACE);
                ctx->nip = tmp;
            }
            if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
                gen_debug_exception(ctx);
            }
        }
        tcg_gen_exit_tb(0);
    }
}
static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_lr, nip);
}
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    ctx->exception = POWERPC_EXCP_BRANCH;
    /* sign extend LI */
    li = LI(ctx->opcode);
    li = (li ^ 0x02000000) - 0x02000000;
    if (likely(AA(ctx->opcode) == 0)) {
        target = ctx->nip + li - 4;
    } else {
        target = li;
    }
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->nip);
    }
    gen_update_cfar(ctx, ctx->nip);
    gen_goto_tb(ctx, 0, target);
}
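/*
 * gen_bcond implements all conditional-branch forms.  When BO & 0x04 is
 * clear, CTR is decremented and tested; when BO & 0x10 is clear, the CR
 * bit selected by BI is tested.  Either failing test branches to a local
 * label that becomes the fall-through (not-taken) path.  The branch
 * target comes from the immediate (BCOND_IM), LR, CTR or TAR.
 */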
static inline void gen_bcond(DisasContext *ctx, int type)
{
    uint32_t bo = BO(ctx->opcode);
    TCGLabel *l1;
    TCGv target;

    ctx->exception = POWERPC_EXCP_BRANCH;
    if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
        target = tcg_temp_local_new();
        if (type == BCOND_CTR)
            tcg_gen_mov_tl(target, cpu_ctr);
        else if (type == BCOND_TAR)
            gen_load_spr(target, SPR_TAR);
        else
            tcg_gen_mov_tl(target, cpu_lr);
    } else {
        TCGV_UNUSED(target);
    }
    if (LK(ctx->opcode))
        gen_setlr(ctx, ctx->nip);
    l1 = gen_new_label();
    if ((bo & 0x4) == 0) {
        /* Decrement and test CTR */
        TCGv temp = tcg_temp_new();
        if (unlikely(type == BCOND_CTR)) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }
        tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(temp, cpu_ctr);
        } else {
            tcg_gen_mov_tl(temp, cpu_ctr);
        }
        if (bo & 0x2) {
            tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
        } else {
            tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
        }
        tcg_temp_free(temp);
    }
    if ((bo & 0x10) == 0) {
        /* Test CR */
        uint32_t bi = BI(ctx->opcode);
        uint32_t mask = 0x08 >> (bi & 0x03);
        TCGv_i32 temp = tcg_temp_new_i32();

        if (bo & 0x8) {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
        } else {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
        }
        tcg_temp_free_i32(temp);
    }
    gen_update_cfar(ctx, ctx->nip);
    if (type == BCOND_IM) {
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
        if (likely(AA(ctx->opcode) == 0)) {
            gen_goto_tb(ctx, 0, ctx->nip + li - 4);
        } else {
            gen_goto_tb(ctx, 0, li);
        }
        gen_set_label(l1);
        gen_goto_tb(ctx, 1, ctx->nip);
    } else {
        if (NARROW_MODE(ctx)) {
            tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
        } else {
            tcg_gen_andi_tl(cpu_nip, target, ~3);
        }
        tcg_gen_exit_tb(0);
        gen_set_label(l1);
        gen_update_nip(ctx, ctx->nip);
        tcg_gen_exit_tb(0);
    }
    if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
        tcg_temp_free(target);
    }
}
static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}

static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}

static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}

static void gen_bctar(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_TAR);
}
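/*
 * Each CR field is held as a 4-bit value in its own cpu_crf[] element, so
 * the CR logical ops below first shift the source fields to line the
 * requested bits up, apply the TCG operation, and then merge the single
 * result bit back into the destination field under a mask.
 */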
/*** Condition register logical ***/
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}

/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
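/*
 * Each cpu_crf[i] TCG global holds one 4-bit CR field.  GEN_CRLOGIC first
 * shifts the source fields so that the requested crbA/crbB bits line up with
 * the destination bit position (sh is the signed distance within the field),
 * applies the TCG logical op, and then merges the single result bit back
 * into the destination field under "bitmask".
 */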
/* mcrf */
static void gen_mcrf(DisasContext *ctx)
{
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}
/*** System linkage ***/

/* rfi (supervisor only) */
static void gen_rfi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* FIXME: This instruction doesn't exist anymore on 64-bit server
     * processors compliant with arch 2.x, we should remove it there,
     * but we need to fix OpenBIOS not to use it on 970 first
     */
    /* Restore CPU state */
    CHK_SV;
    gen_update_cfar(ctx, ctx->nip);
    gen_helper_rfi(cpu_env);
    gen_sync_exception(ctx);
#endif
}

#if defined(TARGET_PPC64)
static void gen_rfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_SV;
    gen_update_cfar(ctx, ctx->nip);
    gen_helper_rfid(cpu_env);
    gen_sync_exception(ctx);
#endif
}

static void gen_hrfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_HV;
    gen_helper_hrfid(cpu_env);
    gen_sync_exception(ctx);
#endif
}
#endif /* defined(TARGET_PPC64) */
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#endif

/* sc */
static void gen_sc(DisasContext *ctx)
{
    uint32_t lev;

    lev = (ctx->opcode >> 5) & 0x7F;
    gen_exception_err(ctx, POWERPC_SYSCALL, lev);
}
/* tw */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* twi */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
    TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

#if defined(TARGET_PPC64)
/* td */
static void gen_td(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* tdi */
static void gen_tdi(DisasContext *ctx)
{
    TCGv t0 = tcg_const_tl(SIMM(ctx->opcode));
    TCGv_i32 t1 = tcg_const_i32(TO(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
#endif
/*** Processor control ***/

static void gen_read_xer(TCGv dst)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

static void gen_write_xer(TCGv src)
{
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) | (1u << XER_OV) | (1u << XER_CA)));
    tcg_gen_shri_tl(cpu_so, src, XER_SO);
    tcg_gen_shri_tl(cpu_ov, src, XER_OV);
    tcg_gen_shri_tl(cpu_ca, src, XER_CA);
    tcg_gen_andi_tl(cpu_so, cpu_so, 1);
    tcg_gen_andi_tl(cpu_ov, cpu_ov, 1);
    tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
}
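/*
 * XER is kept split across TCG globals: cpu_xer holds the architected
 * register minus the SO/OV/CA bits, which live in cpu_so/cpu_ov/cpu_ca as
 * 0/1 values.  gen_read_xer() reassembles the architected value and
 * gen_write_xer() scatters a full value back into the four globals.
 */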
/* mcrxr */
static void gen_mcrxr(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);
    tcg_gen_shli_i32(t1, t1, 2);
    tcg_gen_shli_i32(dst, dst, 1);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}
/* mfcr mfocrf */
static void gen_mfcr(DisasContext *ctx)
{
    uint32_t crm, crn;

    if (likely(ctx->opcode & 0x00100000)) {
        crm = CRM(ctx->opcode);
        if (likely(crm && ((crm & (crm - 1)) == 0))) {
            crn = ctz32(crm);
            tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
            tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
                            cpu_gpr[rD(ctx->opcode)], crn * 4);
        }
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_mov_i32(t0, cpu_crf[0]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[1]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[2]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[3]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[4]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[5]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[6]);
        tcg_gen_shli_i32(t0, t0, 4);
        tcg_gen_or_i32(t0, t0, cpu_crf[7]);
        tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
        tcg_temp_free_i32(t0);
    }
}
/* mfmsr */
static void gen_mfmsr(DisasContext *ctx)
{
    CHK_SV;
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
}

static void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
}
#define SPR_NOACCESS (&spr_noaccess)
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

#if defined(CONFIG_USER_ONLY)
    read_cb = ctx->spr_cb[sprn].uea_read;
#else
    if (ctx->pr) {
        read_cb = ctx->spr_cb[sprn].uea_read;
    } else if (ctx->hv) {
        read_cb = ctx->spr_cb[sprn].hea_read;
    } else {
        read_cb = ctx->spr_cb[sprn].oea_read;
    }
#endif
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /* This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                fprintf(stderr, "Trying to read privileged spr %d (0x%03x) at "
                        TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
                if (qemu_log_separate()) {
                    qemu_log("Trying to read privileged spr %d (0x%03x) at "
                             TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
                }
            }
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }
        /* Not defined */
        fprintf(stderr, "Trying to read invalid spr %d (0x%03x) at "
                TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
        if (qemu_log_separate()) {
            qemu_log("Trying to read invalid spr %d (0x%03x) at "
                     TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
        }

        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}
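/*
 * SPR access goes through per-SPR callback tables: uea_read for problem
 * state, hea_read for hypervisor state and oea_read for plain supervisor
 * state (the *_write equivalents are used by gen_mtspr() below).  A NULL
 * entry means the SPR is not defined at all, while SPR_NOACCESS marks an
 * SPR that exists but is not accessible at the current privilege level.
 */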
static void gen_mfspr(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}

/* mftb */
static void gen_mftb(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}
/* mtcrf mtocrf*/
static void gen_mtcrf(DisasContext *ctx)
{
    uint32_t crm, crn;

    crm = CRM(ctx->opcode);
    if (likely((ctx->opcode & 0x00100000))) {
        if (crm && ((crm & (crm - 1)) == 0)) {
            TCGv_i32 temp = tcg_temp_new_i32();
            crn = ctz32(crm);
            tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shri_i32(temp, temp, crn * 4);
            tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
            tcg_temp_free_i32(temp);
        }
    } else {
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
        for (crn = 0 ; crn < 8 ; crn++) {
            if (crm & (1 << crn)) {
                tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
                tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
            }
        }
        tcg_temp_free_i32(temp);
    }
}
/* mtmsrd */
#if defined(TARGET_PPC64)
static void gen_mtmsrd(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
                        (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr,
                        ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        /* XXX: we need to update nip before the store
         *      if we enter power saving mode, we will exit the loop
         *      directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
        gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif /* !defined(CONFIG_USER_ONLY) */
}
#endif /* defined(TARGET_PPC64) */
static void gen_mtmsr(DisasContext *ctx)
{
    CHK_SV;

#if !defined(CONFIG_USER_ONLY)
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        TCGv t0 = tcg_temp_new();
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
                        (1 << MSR_RI) | (1 << MSR_EE));
        tcg_gen_andi_tl(cpu_msr, cpu_msr,
                        ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
        tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
        tcg_temp_free(t0);
    } else {
        TCGv msr = tcg_temp_new();

        /* XXX: we need to update nip before the store
         *      if we enter power saving mode, we will exit the loop
         *      directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
#if defined(TARGET_PPC64)
        tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
#else
        tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
#endif
        gen_helper_store_msr(cpu_env, msr);
        tcg_temp_free(msr);
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        gen_stop_exception(ctx);
    }
#endif
}
/* mtspr */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

#if defined(CONFIG_USER_ONLY)
    write_cb = ctx->spr_cb[sprn].uea_write;
#else
    if (ctx->pr) {
        write_cb = ctx->spr_cb[sprn].uea_write;
    } else if (ctx->hv) {
        write_cb = ctx->spr_cb[sprn].hea_write;
    } else {
        write_cb = ctx->spr_cb[sprn].oea_write;
    }
#endif
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            fprintf(stderr, "Trying to write privileged spr %d (0x%03x) at "
                    TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
            if (qemu_log_separate()) {
                qemu_log("Trying to write privileged spr %d (0x%03x) at "
                         TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
            }
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }

        /* Not defined */
        if (qemu_log_separate()) {
            qemu_log("Trying to write invalid spr %d (0x%03x) at "
                     TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);
        }
        fprintf(stderr, "Trying to write invalid spr %d (0x%03x) at "
                TARGET_FMT_lx "\n", sprn, sprn, ctx->nip - 4);

        /* The behaviour depends on MSR:PR and SPR# bit 0x10,
         * it can generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        } else {
            if (ctx->pr || sprn == 0) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
            }
        }
    }
}
#if defined(TARGET_PPC64)
/* setb */
static void gen_setb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t8 = tcg_temp_new_i32();
    TCGv_i32 tm1 = tcg_temp_new_i32();
    int crf = crfS(ctx->opcode);

    tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
    tcg_gen_movi_i32(t8, 8);
    tcg_gen_movi_i32(tm1, -1);
    tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
    tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t8);
    tcg_temp_free_i32(tm1);
}
#endif
/*** Cache management ***/

/* dcbf */
static void gen_dcbf(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}

/* dcbi (Supervisor only) */
static void gen_dcbi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    EA = tcg_temp_new();
    gen_set_access_type(ctx, ACCESS_CACHE);
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    gen_qemu_ld8u(ctx, val, EA);
    gen_qemu_st8(ctx, val, EA);
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* dcbst */
static void gen_dcbst(DisasContext *ctx)
{
    /* XXX: specification say this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}

/* dcbt */
static void gen_dcbt(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* dcbtst */
static void gen_dcbtst(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* dcbtls */
static void gen_dcbtls(DisasContext *ctx)
{
    /* Always fails locking the cache */
    TCGv t0 = tcg_temp_new();
    gen_load_spr(t0, SPR_Exxx_L1CSR0);
    tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
    gen_store_spr(SPR_Exxx_L1CSR0, t0);
    tcg_temp_free(t0);
}
/* dcbz */
static void gen_dcbz(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_is_dcbzl;
    int is_dcbzl = ctx->opcode & 0x00200000 ? 1 : 0;

    gen_set_access_type(ctx, ACCESS_CACHE);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    tcgv_addr = tcg_temp_new();
    tcgv_is_dcbzl = tcg_const_i32(is_dcbzl);

    gen_addr_reg_index(ctx, tcgv_addr);
    gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_is_dcbzl);

    tcg_temp_free(tcgv_addr);
    tcg_temp_free_i32(tcgv_is_dcbzl);
}
/* dst / dstt */
static void gen_dst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
    } else {
        /* interpreted as no-op */
    }
}

/* dstst / dststt */
static void gen_dstst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
    } else {
        /* interpreted as no-op */
    }
}

/* dss / dssall */
static void gen_dss(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* icbi */
static void gen_icbi(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbi(cpu_env, t0);
    tcg_temp_free(t0);
}

/* dcba */
static void gen_dcba(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a store by the MMU
     *      but does not generate any exception
     */
}
/*** Segment register manipulation ***/
/* Supervisor only: */

/* mfsr */
static void gen_mfsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfsrin */
static void gen_mfsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr */
static void gen_mtsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin */
static void gen_mtsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
#if defined(TARGET_PPC64)
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */

/* mfsr */
static void gen_mfsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfsrin */
static void gen_mfsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsr */
static void gen_mtsr_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_const_tl(SR(ctx->opcode));
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtsrin */
static void gen_mtsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbmte */
static void gen_slbmte(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
static void gen_slbmfee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_slbmfev(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_slbfee_(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
#else
    TCGLabel *l1, *l2;

    if (unlikely(ctx->pr)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }
    gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
                             cpu_gpr[rB(ctx->opcode)]);
    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
    tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 1 << CRF_EQ);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
    gen_set_label(l2);
#endif
}
#endif /* defined(TARGET_PPC64) */
/*** Lookaside buffer management ***/
/* Optional & supervisor only: */

/* tlbia */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;

    gen_helper_tlbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbiel */
static void gen_tlbiel(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbie */
static void gen_tlbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;

    if (NARROW_MODE(ctx)) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
        gen_helper_tlbie(cpu_env, t0);
        tcg_temp_free(t0);
    } else {
        gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsync */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;

    /* tlbsync is a nop for server, ptesync handles delayed tlb flush,
     * embedded however needs to deal with tlbsync. We don't try to be
     * fancy and swallow the overhead of checking for both.
     */
    gen_check_tlb_flush(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* slbia */
static void gen_slbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbia(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* slbie */
static void gen_slbie(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
#endif /* defined(TARGET_PPC64) */
/*** External control ***/
/* Optional: */

/* eciwx */
static void gen_eciwx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x03);
    gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}

/* ecowx */
static void gen_ecowx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x03);
    gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}
/* PowerPC 601 specific instructions */

/* abs - abs. */
static void gen_abs(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* abso - abso. */
static void gen_abso(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGLabel *l3 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rA(ctx->opcode)], 0x80000000, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l3);
    gen_set_label(l2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* clcs */
static void gen_clcs(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
    gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free_i32(t0);
    /* Rc=1 sets CR0 to an undefined state */
}
/* div - div. */
static void gen_div(DisasContext *ctx)
{
    gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divo - divo. */
static void gen_divo(DisasContext *ctx)
{
    gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divs - divs. */
static void gen_divs(DisasContext *ctx)
{
    gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* divso - divso. */
static void gen_divso(DisasContext *ctx)
{
    gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* doz - doz. */
static void gen_doz(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
                      cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* dozo - dozo. */
static void gen_dozo(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
                      cpu_gpr[rA(ctx->opcode)], l1);
    tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* dozi */
static void gen_dozi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
    tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* lscbx - lscbx. */
static void gen_lscbx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
    TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
    TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));

    gen_addr_reg_index(ctx, t0);
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
    tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, t0);
    tcg_temp_free(t0);
}
/* maskg - maskg. */
static void gen_maskg(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    TCGv t3 = tcg_temp_new();
    tcg_gen_movi_tl(t3, 0xFFFFFFFF);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
    tcg_gen_addi_tl(t2, t0, 1);
    tcg_gen_shr_tl(t2, t3, t2);
    tcg_gen_shr_tl(t3, t3, t1);
    tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
    tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
    tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(t3);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* maskir - maskir. */
static void gen_maskir(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* mul - mul. */
static void gen_mul(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
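/*
 * POWER's mul produces a 64-bit product from two 32-bit operands: the low
 * half is architecturally placed in the MQ register and the high half in
 * rD, which is what the i64 arithmetic above implements.
 */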
/* mulo - mulo. */
static void gen_mulo(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* nabs - nabs. */
static void gen_nabs(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* nabso - nabso. */
static void gen_nabso(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    /* nabs never overflows */
    tcg_gen_movi_tl(cpu_ov, 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* rlmi - rlmi. */
static void gen_rlmi(DisasContext *ctx)
{
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_andi_tl(t0, t0, MASK(mb, me));
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    ~MASK(mb, me));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* rrib - rrib. */
static void gen_rrib(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0x80000000);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sle - sle. */
static void gen_sle(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sleq - sleq. */
static void gen_sleq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t2, 0xFFFFFFFF);
    tcg_gen_shl_tl(t2, t2, t0);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t2);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sliq - sliq. */
static void gen_sliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* slliq - slliq. */
static void gen_slliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0,  (0xFFFFFFFFU << sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sllq - sllq. */
static void gen_sllq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shl_tl(t1, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    gen_load_spr(t2, SPR_MQ);
    tcg_gen_andc_tl(t1, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* slq - slq. */
static void gen_slq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sraiq - sraiq. */
static void gen_sraiq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t0, t0, t1);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_movi_tl(cpu_ca, 0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
    tcg_gen_movi_tl(cpu_ca, 1);
    gen_set_label(l1);
    tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sraq - sraq. */
static void gen_sraq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_subfi_tl(t2, 32, t2);
    tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
    tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_movi_tl(cpu_ca, 0);
    tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
    tcg_gen_movi_tl(cpu_ca, 1);
    gen_set_label(l2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sre - sre. */
static void gen_sre(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srea - srea. */
static void gen_srea(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sreq */
static void gen_sreq(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t2, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t2, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sriq */
static void gen_sriq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srliq */
static void gen_srliq(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0,  (0xFFFFFFFFU >> sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srlq */
static void gen_srlq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shr_tl(t2, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_and_tl(t0, t0, t2);
    gen_load_spr(t1, SPR_MQ);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srq */
static void gen_srq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* PowerPC 602 specific instructions */

/* dsa */
static void gen_dsa(DisasContext *ctx)
{
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* esa */
static void gen_esa(DisasContext *ctx)
{
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* mfrom */
static void gen_mfrom(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* 602 - 603 - G2 TLB management */

/* tlbld */
static void gen_tlbld_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbli */
static void gen_tlbli_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* 74xx TLB management */

/* tlbld */
static void gen_tlbld_74xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbli */
static void gen_tlbli_74xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* POWER instructions not in PowerPC 601 */

/* clf */
static void gen_clf(DisasContext *ctx)
{
    /* Cache line flush: implemented as no-op */
}

/* cli */
static void gen_cli(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Cache line invalidate: privileged and treated as no-op */
    CHK_SV;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dclst */
static void gen_dclst(DisasContext *ctx)
{
    /* Data cache line store: treated as no-op */
}

static void gen_mfsri(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_shri_tl(t0, t0, 28);
    tcg_gen_andi_tl(t0, t0, 0xF);
    gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
    tcg_temp_free(t0);
    if (ra != 0 && ra != rd)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rac(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfsvc(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_rfsvc(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* svc is not implemented for now */

/* POWER2 specific instructions */
/* Quad manipulation (load/store two floats at a time) */

/* lfq */
static void gen_lfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}

/* lfqu */
static void gen_lfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

/* lfqux */
static void gen_lfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    tcg_temp_free(t1);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
}

/* lfqx */
static void gen_lfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}

/* stfq */
static void gen_stfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_st64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}

/* stfqu */
static void gen_stfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_st64(ctx, cpu_fpr[rd], t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    tcg_temp_free(t1);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
}

/* stfqux */
static void gen_stfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_st64(ctx, cpu_fpr[rd], t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    tcg_temp_free(t1);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    tcg_temp_free(t0);
}

/* stfqx */
static void gen_stfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_st64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_st64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}
/* BookE specific instructions */

/* XXX: not implemented on 440 ? */
static void gen_mfapidi(DisasContext *ctx)
{
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* XXX: not implemented on 440 ? */
static void gen_tlbiva(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* All 405 MAC instructions are translated here */
static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
                                        int ra, int rb, int rt, int Rc)
{
    TCGv t0, t1;

    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();

    switch (opc3 & 0x0D) {
    case 0x05:
        /* macchw    - macchw.    - macchwo   - macchwo.   */
        /* macchws   - macchws.   - macchwso  - macchwso.  */
        /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
        /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
        /* mulchw - mulchw. */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x04:
        /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
        /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
        /* mulchwu - mulchwu. */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x01:
        /* machhw    - machhw.    - machhwo   - machhwo.   */
        /* machhws   - machhws.   - machhwso  - machhwso.  */
        /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
        /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
        /* mulhhw - mulhhw. */
        tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16s_tl(t0, t0);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x00:
        /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
        /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
        /* mulhhwu - mulhhwu. */
        tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16u_tl(t0, t0);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x0D:
        /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
        /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
        /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
        /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
        /* mullhw - mullhw. */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
        break;
    case 0x0C:
        /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
        /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
        /* mullhwu - mullhwu. */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
        break;
    }
    if (opc2 & 0x04) {
        /* (n)multiply-and-accumulate (0x0C / 0x0E) */
        tcg_gen_mul_tl(t1, t0, t1);
        if (opc2 & 0x02) {
            /* nmultiply-and-accumulate (0x0E) */
            tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
        } else {
            /* multiply-and-accumulate (0x0C) */
            tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
        }

        if (opc3 & 0x12) {
            /* Check overflow and/or saturate */
            TCGLabel *l1 = gen_new_label();

            if (opc3 & 0x10) {
                /* Start with XER OV disabled, the most likely case */
                tcg_gen_movi_tl(cpu_ov, 0);
            }
            if (opc3 & 0x01) {
                /* Signed */
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
                tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
                tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
                    tcg_gen_xori_tl(t0, t0, 0x7fffffff);
                }
            } else {
                /* Unsigned */
                tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    tcg_gen_movi_tl(t0, UINT32_MAX);
                }
            }
            if (opc3 & 0x10) {
                /* Check overflow */
                tcg_gen_movi_tl(cpu_ov, 1);
                tcg_gen_movi_tl(cpu_so, 1);
            }
            gen_set_label(l1);
            tcg_gen_mov_tl(cpu_gpr[rt], t0);
        }
    } else {
        tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc) != 0) {
        /* Update Rc0 */
        gen_set_Rc0(ctx, cpu_gpr[rt]);
    }
}
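/*
 * The opc3 bits decoded above follow the 405 MAC encoding: bit 0x01 selects
 * signed operands, 0x02 requests saturation, 0x10 requests XER[OV]/[SO]
 * update, and the low nibble (masked with 0x0D) picks which 16-bit halves
 * of rA/rB feed the multiply.  opc2 distinguishes a plain multiply (0x08)
 * from accumulate (0x0C) and negative-accumulate (0x0E) forms.
 */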
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw    - macchw.    */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo   - macchwo.   */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws   - macchws.   */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso  - macchwso.  */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu  - macchwsu.  */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu   - macchwu.   */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo  - macchwuo.  */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw    - machhw.    */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo   - machhwo.   */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws   - machhws.   */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso  - machhwso.  */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu  - machhwsu.  */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu   - machhwu.   */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo  - machhwuo.  */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw    - maclhw.    */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo   - maclhwo.   */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws   - maclhws.   */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso  - maclhwso.  */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu   - maclhwu.   */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo  - maclhwuo.  */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu  - maclhwsu.  */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw   - nmacchw.   */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo  - nmacchwo.  */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws  - nmacchws.  */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw   - nmachhw.   */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo  - nmachhwo.  */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws  - nmachhws.  */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw   - nmaclhw.   */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo  - nmaclhwo.  */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws  - nmaclhws.  */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw  - mulchw.  */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw  - mulhhw.  */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw  - mullhw.  */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
/* mfdcr */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcr */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* mfdcrx */
/* XXX: not implemented on 440 ? */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcrx */
/* XXX: not implemented on 440 ? */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}
/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}
/* dccci */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* dcread */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* iccci */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}
/* rfci (supervisor only) */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* XXX: not implemented on 440 ? */
static void gen_rfmci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfmci(cpu_env);
    gen_sync_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* TLB management - PowerPC 405 implementation */

/* tlbre */
static void gen_tlbre_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
/* TLB management - PowerPC 440 implementation */

/* tlbre */
static void gen_tlbre_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                 t0, cpu_gpr[rA(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe */
static void gen_tlbwe_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
                                 cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
/* TLB management - PowerPC BookE 2.06 implementation */

/* tlbre */
static void gen_tlbre_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_booke206_tlbre(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    if (rA(ctx->opcode)) {
        t0 = tcg_temp_new();
        tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
    } else {
        t0 = tcg_const_tl(0);
    }

    tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
    gen_helper_booke206_tlbsx(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe */
static void gen_tlbwe_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_booke206_tlbwe(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_tlbivax_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_booke206_tlbivax(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_tlbilx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);

    switch ((ctx->opcode >> 21) & 0x3) {
    case 0:
        gen_helper_booke206_tlbilx0(cpu_env, t0);
        break;
    case 1:
        gen_helper_booke206_tlbilx1(cpu_env, t0);
        break;
    case 3:
        gen_helper_booke206_tlbilx3(cpu_env, t0);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }

    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
/* wrtee */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /* Stop translation to have a chance to raise an exception
     * if we just set msr_ee to 1
     */
    gen_stop_exception(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* wrteei */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        gen_stop_exception(ctx);
    } else {
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
/* PowerPC 440 specific instructions */

/* dlmzb */
static void gen_dlmzb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
    gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}

/* mbar replaces eieio on 440 */
static void gen_mbar(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* msync replaces sync on 440 */
static void gen_msync_4xx(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* icbt */
static void gen_icbt_440(DisasContext *ctx)
{
    /* interpreted as no-op */
    /* XXX: specification says this is treated as a load by the MMU
     *      but does not generate any exception
     */
}

/* Embedded.Processor Control */

static void gen_msgclr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_msgsnd(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
/*** Altivec vector extension ***/
/* Altivec registers moves */

static inline TCGv_ptr gen_avr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
    return r;
}
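
/*
 * Illustrative sketch (not part of the build): gen_avr_ptr() only computes
 * a host pointer into the CPUPPCState vector register file, so a VX-form
 * helper call generated below boils down to something like
 *
 *     TCGv_ptr rd = gen_avr_ptr(rD(ctx->opcode));
 *     TCGv_ptr ra = gen_avr_ptr(rA(ctx->opcode));
 *     TCGv_ptr rb = gen_avr_ptr(rB(ctx->opcode));
 *     gen_helper_vaddubm(rd, ra, rb);     // helper dereferences the pointers
 *     tcg_temp_free_ptr(rd); ...
 *
 * The helper then operates on the ppc_avr_t values in env->avr[].
 */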
7088 #define GEN_VR_LDX(name, opc2, opc3) \
7089 static void glue(gen_, name)(DisasContext *ctx) \
7092 if (unlikely(!ctx->altivec_enabled)) { \
7093 gen_exception(ctx, POWERPC_EXCP_VPU); \
7096 gen_set_access_type(ctx, ACCESS_INT); \
7097 EA = tcg_temp_new(); \
7098 gen_addr_reg_index(ctx, EA); \
7099 tcg_gen_andi_tl(EA, EA, ~0xf); \
7100 /* We only need to swap high and low halves. gen_qemu_ld64 does necessary \
7101 64-bit byteswap already. */ \
7102 if (ctx->le_mode) { \
7103 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
7104 tcg_gen_addi_tl(EA, EA, 8); \
7105 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
7107 gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
7108 tcg_gen_addi_tl(EA, EA, 8); \
7109 gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
7111 tcg_temp_free(EA); \
7114 #define GEN_VR_STX(name, opc2, opc3) \
7115 static void gen_st##name(DisasContext *ctx) \
7118 if (unlikely(!ctx->altivec_enabled)) { \
7119 gen_exception(ctx, POWERPC_EXCP_VPU); \
7122 gen_set_access_type(ctx, ACCESS_INT); \
7123 EA = tcg_temp_new(); \
7124 gen_addr_reg_index(ctx, EA); \
7125 tcg_gen_andi_tl(EA, EA, ~0xf); \
7126 /* We only need to swap high and low halves. gen_qemu_st64 does necessary \
7127 64-bit byteswap already. */ \
7128 if (ctx->le_mode) { \
7129 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
7130 tcg_gen_addi_tl(EA, EA, 8); \
7131 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
7133 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
7134 tcg_gen_addi_tl(EA, EA, 8); \
7135 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
7137 tcg_temp_free(EA); \
7140 #define GEN_VR_LVE(name, opc2, opc3, size) \
7141 static void gen_lve##name(DisasContext *ctx) \
7145 if (unlikely(!ctx->altivec_enabled)) { \
7146 gen_exception(ctx, POWERPC_EXCP_VPU); \
7149 gen_set_access_type(ctx, ACCESS_INT); \
7150 EA = tcg_temp_new(); \
7151 gen_addr_reg_index(ctx, EA); \
7153 tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
7155 rs = gen_avr_ptr(rS(ctx->opcode)); \
7156 gen_helper_lve##name(cpu_env, rs, EA); \
7157 tcg_temp_free(EA); \
7158 tcg_temp_free_ptr(rs); \
7161 #define GEN_VR_STVE(name, opc2, opc3, size) \
7162 static void gen_stve##name(DisasContext *ctx) \
7166 if (unlikely(!ctx->altivec_enabled)) { \
7167 gen_exception(ctx, POWERPC_EXCP_VPU); \
7170 gen_set_access_type(ctx, ACCESS_INT); \
7171 EA = tcg_temp_new(); \
7172 gen_addr_reg_index(ctx, EA); \
7174 tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
7176 rs = gen_avr_ptr(rS(ctx->opcode)); \
7177 gen_helper_stve##name(cpu_env, rs, EA); \
7178 tcg_temp_free(EA); \
7179 tcg_temp_free_ptr(rs); \
GEN_VR_LDX(lvx, 0x07, 0x03);
/* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
GEN_VR_LDX(lvxl, 0x07, 0x0B);

GEN_VR_LVE(bx, 0x07, 0x00, 1);
GEN_VR_LVE(hx, 0x07, 0x01, 2);
GEN_VR_LVE(wx, 0x07, 0x02, 4);

GEN_VR_STX(svx, 0x07, 0x07);
/* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
GEN_VR_STX(svxl, 0x07, 0x0F);

GEN_VR_STVE(bx, 0x07, 0x04, 1);
GEN_VR_STVE(hx, 0x07, 0x05, 2);
GEN_VR_STVE(wx, 0x07, 0x06, 4);
static void gen_lvsl(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsl(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}

static void gen_lvsr(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsr(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}

static void gen_mfvscr(DisasContext *ctx)
{
    TCGv_i32 t;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, vscr));
    tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
    tcg_temp_free_i32(t);
}

static void gen_mtvscr(DisasContext *ctx)
{
    TCGv_ptr p;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    p = gen_avr_ptr(rB(ctx->opcode));
    gen_helper_mtvscr(cpu_env, p);
    tcg_temp_free_ptr(p);
}
7256 /* Logical operations */
7257 #define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
7258 static void glue(gen_, name)(DisasContext *ctx) \
7260 if (unlikely(!ctx->altivec_enabled)) { \
7261 gen_exception(ctx, POWERPC_EXCP_VPU); \
7264 tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
7265 tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
GEN_VX_LOGICAL(veqv, tcg_gen_eqv_i64, 2, 26);
GEN_VX_LOGICAL(vnand, tcg_gen_nand_i64, 2, 22);
GEN_VX_LOGICAL(vorc, tcg_gen_orc_i64, 2, 21);
7277 #define GEN_VXFORM(name, opc2, opc3) \
7278 static void glue(gen_, name)(DisasContext *ctx) \
7280 TCGv_ptr ra, rb, rd; \
7281 if (unlikely(!ctx->altivec_enabled)) { \
7282 gen_exception(ctx, POWERPC_EXCP_VPU); \
7285 ra = gen_avr_ptr(rA(ctx->opcode)); \
7286 rb = gen_avr_ptr(rB(ctx->opcode)); \
7287 rd = gen_avr_ptr(rD(ctx->opcode)); \
7288 gen_helper_##name (rd, ra, rb); \
7289 tcg_temp_free_ptr(ra); \
7290 tcg_temp_free_ptr(rb); \
7291 tcg_temp_free_ptr(rd); \
7294 #define GEN_VXFORM_ENV(name, opc2, opc3) \
7295 static void glue(gen_, name)(DisasContext *ctx) \
7297 TCGv_ptr ra, rb, rd; \
7298 if (unlikely(!ctx->altivec_enabled)) { \
7299 gen_exception(ctx, POWERPC_EXCP_VPU); \
7302 ra = gen_avr_ptr(rA(ctx->opcode)); \
7303 rb = gen_avr_ptr(rB(ctx->opcode)); \
7304 rd = gen_avr_ptr(rD(ctx->opcode)); \
7305 gen_helper_##name(cpu_env, rd, ra, rb); \
7306 tcg_temp_free_ptr(ra); \
7307 tcg_temp_free_ptr(rb); \
7308 tcg_temp_free_ptr(rd); \
7311 #define GEN_VXFORM3(name, opc2, opc3) \
7312 static void glue(gen_, name)(DisasContext *ctx) \
7314 TCGv_ptr ra, rb, rc, rd; \
7315 if (unlikely(!ctx->altivec_enabled)) { \
7316 gen_exception(ctx, POWERPC_EXCP_VPU); \
7319 ra = gen_avr_ptr(rA(ctx->opcode)); \
7320 rb = gen_avr_ptr(rB(ctx->opcode)); \
7321 rc = gen_avr_ptr(rC(ctx->opcode)); \
7322 rd = gen_avr_ptr(rD(ctx->opcode)); \
7323 gen_helper_##name(rd, ra, rb, rc); \
7324 tcg_temp_free_ptr(ra); \
7325 tcg_temp_free_ptr(rb); \
7326 tcg_temp_free_ptr(rc); \
7327 tcg_temp_free_ptr(rd); \
/*
 * Support for Altivec instruction pairs that use bit 31 (Rc) as
 * an opcode bit.  In general, these pairs come from different
 * versions of the ISA, so we must also support a pair of flags for
 * each instruction.
 */
#define GEN_VXFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx)             \
{                                                                      \
    if ((Rc(ctx->opcode) == 0) &&                                      \
        ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
        gen_##name0(ctx);                                              \
    } else if ((Rc(ctx->opcode) == 1) &&                               \
        ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
        gen_##name1(ctx);                                              \
    } else {                                                           \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);            \
    }                                                                  \
}
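
/*
 * Sketch of a single expansion, for orientation only: after the
 * preprocessor runs, GEN_VXFORM_DUAL(vsububm, ..., bcdadd, ...) yields
 * roughly
 *
 *     static void gen_vsububm_bcdadd(DisasContext *ctx)
 *     {
 *         if ((Rc(ctx->opcode) == 0) && (ctx->insns_flags & PPC_ALTIVEC)) {
 *             gen_vsububm(ctx);
 *         } else if ((Rc(ctx->opcode) == 1) &&
 *                    (ctx->insns_flags2 & PPC2_ALTIVEC_207)) {
 *             gen_bcdadd(ctx);
 *         } else {
 *             gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
 *         }
 *     }
 *
 * i.e. bit 31 of the opcode plus the CPU's instruction flags pick which
 * of the two overlapping encodings gets translated.
 */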
GEN_VXFORM(vaddubm, 0, 0);
GEN_VXFORM(vadduhm, 0, 1);
GEN_VXFORM(vadduwm, 0, 2);
GEN_VXFORM(vaddudm, 0, 3);
GEN_VXFORM(vsububm, 0, 16);
GEN_VXFORM(vsubuhm, 0, 17);
GEN_VXFORM(vsubuwm, 0, 18);
GEN_VXFORM(vsubudm, 0, 19);
GEN_VXFORM(vmaxub, 1, 0);
GEN_VXFORM(vmaxuh, 1, 1);
GEN_VXFORM(vmaxuw, 1, 2);
GEN_VXFORM(vmaxud, 1, 3);
GEN_VXFORM(vmaxsb, 1, 4);
GEN_VXFORM(vmaxsh, 1, 5);
GEN_VXFORM(vmaxsw, 1, 6);
GEN_VXFORM(vmaxsd, 1, 7);
GEN_VXFORM(vminub, 1, 8);
GEN_VXFORM(vminuh, 1, 9);
GEN_VXFORM(vminuw, 1, 10);
GEN_VXFORM(vminud, 1, 11);
GEN_VXFORM(vminsb, 1, 12);
GEN_VXFORM(vminsh, 1, 13);
GEN_VXFORM(vminsw, 1, 14);
GEN_VXFORM(vminsd, 1, 15);
GEN_VXFORM(vavgub, 1, 16);
GEN_VXFORM(vavguh, 1, 17);
GEN_VXFORM(vavguw, 1, 18);
GEN_VXFORM(vavgsb, 1, 20);
GEN_VXFORM(vavgsh, 1, 21);
GEN_VXFORM(vavgsw, 1, 22);
GEN_VXFORM(vmrghb, 6, 0);
GEN_VXFORM(vmrghh, 6, 1);
GEN_VXFORM(vmrghw, 6, 2);
GEN_VXFORM(vmrglb, 6, 4);
GEN_VXFORM(vmrglh, 6, 5);
GEN_VXFORM(vmrglw, 6, 6);
static void gen_vmrgew(DisasContext *ctx)
{
    TCGv_i64 tmp;
    int VT, VA, VB;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    VT = rD(ctx->opcode);
    VA = rA(ctx->opcode);
    VB = rB(ctx->opcode);
    tmp = tcg_temp_new_i64();
    tcg_gen_shri_i64(tmp, cpu_avrh[VB], 32);
    tcg_gen_deposit_i64(cpu_avrh[VT], cpu_avrh[VA], tmp, 0, 32);
    tcg_gen_shri_i64(tmp, cpu_avrl[VB], 32);
    tcg_gen_deposit_i64(cpu_avrl[VT], cpu_avrl[VA], tmp, 0, 32);
    tcg_temp_free_i64(tmp);
}

static void gen_vmrgow(DisasContext *ctx)
{
    int VT, VA, VB;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    VT = rD(ctx->opcode);
    VA = rA(ctx->opcode);
    VB = rB(ctx->opcode);

    tcg_gen_deposit_i64(cpu_avrh[VT], cpu_avrh[VB], cpu_avrh[VA], 32, 32);
    tcg_gen_deposit_i64(cpu_avrl[VT], cpu_avrl[VB], cpu_avrl[VA], 32, 32);
}
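
/*
 * Worked example (comment only): per 64-bit half, vmrgew keeps the even
 * (upper) word of VA and pairs it with the even word of VB, while vmrgow
 * pairs the odd (lower) words, so the deposits above compute
 *
 *     vmrgew: VT.hi = (VA.hi & 0xffffffff00000000) | (VB.hi >> 32)
 *     vmrgow: VT.hi = (VA.hi << 32) | (VB.hi & 0x00000000ffffffff)
 *
 * which matches the merge-even/merge-odd word definitions without a
 * helper call.
 */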
GEN_VXFORM(vmuloub, 4, 0);
GEN_VXFORM(vmulouh, 4, 1);
GEN_VXFORM(vmulouw, 4, 2);
GEN_VXFORM(vmuluwm, 4, 2);
GEN_VXFORM_DUAL(vmulouw, PPC_ALTIVEC, PPC_NONE,
                vmuluwm, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM(vmulosb, 4, 4);
GEN_VXFORM(vmulosh, 4, 5);
GEN_VXFORM(vmulosw, 4, 6);
GEN_VXFORM(vmuleub, 4, 8);
GEN_VXFORM(vmuleuh, 4, 9);
GEN_VXFORM(vmuleuw, 4, 10);
GEN_VXFORM(vmulesb, 4, 12);
GEN_VXFORM(vmulesh, 4, 13);
GEN_VXFORM(vmulesw, 4, 14);
GEN_VXFORM(vslb, 2, 4);
GEN_VXFORM(vslh, 2, 5);
GEN_VXFORM(vslw, 2, 6);
GEN_VXFORM(vsld, 2, 23);
GEN_VXFORM(vsrb, 2, 8);
GEN_VXFORM(vsrh, 2, 9);
GEN_VXFORM(vsrw, 2, 10);
GEN_VXFORM(vsrd, 2, 27);
GEN_VXFORM(vsrab, 2, 12);
GEN_VXFORM(vsrah, 2, 13);
GEN_VXFORM(vsraw, 2, 14);
GEN_VXFORM(vsrad, 2, 15);
GEN_VXFORM(vslo, 6, 16);
GEN_VXFORM(vsro, 6, 17);
GEN_VXFORM(vaddcuw, 0, 6);
GEN_VXFORM(vsubcuw, 0, 22);
GEN_VXFORM_ENV(vaddubs, 0, 8);
GEN_VXFORM_ENV(vadduhs, 0, 9);
GEN_VXFORM_ENV(vadduws, 0, 10);
GEN_VXFORM_ENV(vaddsbs, 0, 12);
GEN_VXFORM_ENV(vaddshs, 0, 13);
GEN_VXFORM_ENV(vaddsws, 0, 14);
GEN_VXFORM_ENV(vsububs, 0, 24);
GEN_VXFORM_ENV(vsubuhs, 0, 25);
GEN_VXFORM_ENV(vsubuws, 0, 26);
GEN_VXFORM_ENV(vsubsbs, 0, 28);
GEN_VXFORM_ENV(vsubshs, 0, 29);
GEN_VXFORM_ENV(vsubsws, 0, 30);
GEN_VXFORM(vadduqm, 0, 4);
GEN_VXFORM(vaddcuq, 0, 5);
GEN_VXFORM3(vaddeuqm, 30, 0);
GEN_VXFORM3(vaddecuq, 30, 0);
GEN_VXFORM_DUAL(vaddeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
                vaddecuq, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM(vsubuqm, 0, 20);
GEN_VXFORM(vsubcuq, 0, 21);
GEN_VXFORM3(vsubeuqm, 31, 0);
GEN_VXFORM3(vsubecuq, 31, 0);
GEN_VXFORM_DUAL(vsubeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
                vsubecuq, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM(vrlb, 2, 0);
GEN_VXFORM(vrlh, 2, 1);
GEN_VXFORM(vrlw, 2, 2);
GEN_VXFORM(vrld, 2, 3);
GEN_VXFORM(vsl, 2, 7);
GEN_VXFORM(vsr, 2, 11);
GEN_VXFORM_ENV(vpkuhum, 7, 0);
GEN_VXFORM_ENV(vpkuwum, 7, 1);
GEN_VXFORM_ENV(vpkudum, 7, 17);
GEN_VXFORM_ENV(vpkuhus, 7, 2);
GEN_VXFORM_ENV(vpkuwus, 7, 3);
GEN_VXFORM_ENV(vpkudus, 7, 19);
GEN_VXFORM_ENV(vpkshus, 7, 4);
GEN_VXFORM_ENV(vpkswus, 7, 5);
GEN_VXFORM_ENV(vpksdus, 7, 21);
GEN_VXFORM_ENV(vpkshss, 7, 6);
GEN_VXFORM_ENV(vpkswss, 7, 7);
GEN_VXFORM_ENV(vpksdss, 7, 23);
GEN_VXFORM(vpkpx, 7, 12);
GEN_VXFORM_ENV(vsum4ubs, 4, 24);
GEN_VXFORM_ENV(vsum4sbs, 4, 28);
GEN_VXFORM_ENV(vsum4shs, 4, 25);
GEN_VXFORM_ENV(vsum2sws, 4, 26);
GEN_VXFORM_ENV(vsumsws, 4, 30);
GEN_VXFORM_ENV(vaddfp, 5, 0);
GEN_VXFORM_ENV(vsubfp, 5, 1);
GEN_VXFORM_ENV(vmaxfp, 5, 16);
GEN_VXFORM_ENV(vminfp, 5, 17);
7505 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
7506 static void glue(gen_, name)(DisasContext *ctx) \
7508 TCGv_ptr ra, rb, rd; \
7509 if (unlikely(!ctx->altivec_enabled)) { \
7510 gen_exception(ctx, POWERPC_EXCP_VPU); \
7513 ra = gen_avr_ptr(rA(ctx->opcode)); \
7514 rb = gen_avr_ptr(rB(ctx->opcode)); \
7515 rd = gen_avr_ptr(rD(ctx->opcode)); \
7516 gen_helper_##opname(cpu_env, rd, ra, rb); \
7517 tcg_temp_free_ptr(ra); \
7518 tcg_temp_free_ptr(rb); \
7519 tcg_temp_free_ptr(rd); \
7522 #define GEN_VXRFORM(name, opc2, opc3) \
7523 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
7524 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
/*
 * Support for Altivec instructions that use bit 31 (Rc) as an opcode
 * bit but also use bit 21 as an actual Rc bit.  In general, these pairs
 * come from different versions of the ISA, so we must also support a
 * pair of flags for each instruction.
 */
#define GEN_VXRFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx)             \
{                                                                      \
    if ((Rc(ctx->opcode) == 0) &&                                      \
        ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
        if (Rc21(ctx->opcode) == 0) {                                  \
            gen_##name0(ctx);                                          \
        } else {                                                       \
            gen_##name0##_(ctx);                                       \
        }                                                              \
    } else if ((Rc(ctx->opcode) == 1) &&                               \
        ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
        if (Rc21(ctx->opcode) == 0) {                                  \
            gen_##name1(ctx);                                          \
        } else {                                                       \
            gen_##name1##_(ctx);                                       \
        }                                                              \
    } else {                                                           \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);            \
    }                                                                  \
}
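
/*
 * Orientation sketch only: a GEN_VXRFORM_DUAL pair dispatches on two
 * opcode bits, bit 31 (Rc, selecting which instruction of the pair) and
 * bit 21 (Rc21, selecting the CR6-updating "dot" form), e.g. for
 * (vcmpeqfp, vcmpequd):
 *
 *     Rc == 0, Rc21 == 0  ->  gen_vcmpeqfp(ctx)
 *     Rc == 0, Rc21 == 1  ->  gen_vcmpeqfp_(ctx)      // vcmpeqfp.
 *     Rc == 1, Rc21 == 0  ->  gen_vcmpequd(ctx)
 *     Rc == 1, Rc21 == 1  ->  gen_vcmpequd_(ctx)      // vcmpequd.
 *
 * with an invalid-instruction exception when neither encoding is
 * accepted by the CPU's flag sets.
 */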
GEN_VXRFORM(vcmpequb, 3, 0)
GEN_VXRFORM(vcmpequh, 3, 1)
GEN_VXRFORM(vcmpequw, 3, 2)
GEN_VXRFORM(vcmpequd, 3, 3)
GEN_VXRFORM(vcmpgtsb, 3, 12)
GEN_VXRFORM(vcmpgtsh, 3, 13)
GEN_VXRFORM(vcmpgtsw, 3, 14)
GEN_VXRFORM(vcmpgtsd, 3, 15)
GEN_VXRFORM(vcmpgtub, 3, 8)
GEN_VXRFORM(vcmpgtuh, 3, 9)
GEN_VXRFORM(vcmpgtuw, 3, 10)
GEN_VXRFORM(vcmpgtud, 3, 11)
GEN_VXRFORM(vcmpeqfp, 3, 3)
GEN_VXRFORM(vcmpgefp, 3, 7)
GEN_VXRFORM(vcmpgtfp, 3, 11)
GEN_VXRFORM(vcmpbfp, 3, 15)

GEN_VXRFORM_DUAL(vcmpeqfp, PPC_ALTIVEC, PPC_NONE, \
                 vcmpequd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXRFORM_DUAL(vcmpbfp, PPC_ALTIVEC, PPC_NONE, \
                 vcmpgtsd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXRFORM_DUAL(vcmpgtfp, PPC_ALTIVEC, PPC_NONE, \
                 vcmpgtud, PPC_NONE, PPC2_ALTIVEC_207)
7578 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
7579 static void glue(gen_, name)(DisasContext *ctx) \
7583 if (unlikely(!ctx->altivec_enabled)) { \
7584 gen_exception(ctx, POWERPC_EXCP_VPU); \
7587 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
7588 rd = gen_avr_ptr(rD(ctx->opcode)); \
7589 gen_helper_##name (rd, simm); \
7590 tcg_temp_free_i32(simm); \
7591 tcg_temp_free_ptr(rd); \
GEN_VXFORM_SIMM(vspltisb, 6, 12);
GEN_VXFORM_SIMM(vspltish, 6, 13);
GEN_VXFORM_SIMM(vspltisw, 6, 14);
7598 #define GEN_VXFORM_NOA(name, opc2, opc3) \
7599 static void glue(gen_, name)(DisasContext *ctx) \
7602 if (unlikely(!ctx->altivec_enabled)) { \
7603 gen_exception(ctx, POWERPC_EXCP_VPU); \
7606 rb = gen_avr_ptr(rB(ctx->opcode)); \
7607 rd = gen_avr_ptr(rD(ctx->opcode)); \
7608 gen_helper_##name (rd, rb); \
7609 tcg_temp_free_ptr(rb); \
7610 tcg_temp_free_ptr(rd); \
7613 #define GEN_VXFORM_NOA_ENV(name, opc2, opc3) \
7614 static void glue(gen_, name)(DisasContext *ctx) \
7618 if (unlikely(!ctx->altivec_enabled)) { \
7619 gen_exception(ctx, POWERPC_EXCP_VPU); \
7622 rb = gen_avr_ptr(rB(ctx->opcode)); \
7623 rd = gen_avr_ptr(rD(ctx->opcode)); \
7624 gen_helper_##name(cpu_env, rd, rb); \
7625 tcg_temp_free_ptr(rb); \
7626 tcg_temp_free_ptr(rd); \
GEN_VXFORM_NOA(vupkhsb, 7, 8);
GEN_VXFORM_NOA(vupkhsh, 7, 9);
GEN_VXFORM_NOA(vupkhsw, 7, 25);
GEN_VXFORM_NOA(vupklsb, 7, 10);
GEN_VXFORM_NOA(vupklsh, 7, 11);
GEN_VXFORM_NOA(vupklsw, 7, 27);
GEN_VXFORM_NOA(vupkhpx, 7, 13);
GEN_VXFORM_NOA(vupklpx, 7, 15);
GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
GEN_VXFORM_NOA_ENV(vrfim, 5, 11);
GEN_VXFORM_NOA_ENV(vrfin, 5, 8);
GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
GEN_VXFORM_NOA_ENV(vrfiz, 5, 9);
7646 #define GEN_VXFORM_SIMM(name, opc2, opc3) \
7647 static void glue(gen_, name)(DisasContext *ctx) \
7651 if (unlikely(!ctx->altivec_enabled)) { \
7652 gen_exception(ctx, POWERPC_EXCP_VPU); \
7655 simm = tcg_const_i32(SIMM5(ctx->opcode)); \
7656 rd = gen_avr_ptr(rD(ctx->opcode)); \
7657 gen_helper_##name (rd, simm); \
7658 tcg_temp_free_i32(simm); \
7659 tcg_temp_free_ptr(rd); \
7662 #define GEN_VXFORM_UIMM(name, opc2, opc3) \
7663 static void glue(gen_, name)(DisasContext *ctx) \
7667 if (unlikely(!ctx->altivec_enabled)) { \
7668 gen_exception(ctx, POWERPC_EXCP_VPU); \
7671 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
7672 rb = gen_avr_ptr(rB(ctx->opcode)); \
7673 rd = gen_avr_ptr(rD(ctx->opcode)); \
7674 gen_helper_##name (rd, rb, uimm); \
7675 tcg_temp_free_i32(uimm); \
7676 tcg_temp_free_ptr(rb); \
7677 tcg_temp_free_ptr(rd); \
7680 #define GEN_VXFORM_UIMM_ENV(name, opc2, opc3) \
7681 static void glue(gen_, name)(DisasContext *ctx) \
7686 if (unlikely(!ctx->altivec_enabled)) { \
7687 gen_exception(ctx, POWERPC_EXCP_VPU); \
7690 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
7691 rb = gen_avr_ptr(rB(ctx->opcode)); \
7692 rd = gen_avr_ptr(rD(ctx->opcode)); \
7693 gen_helper_##name(cpu_env, rd, rb, uimm); \
7694 tcg_temp_free_i32(uimm); \
7695 tcg_temp_free_ptr(rb); \
7696 tcg_temp_free_ptr(rd); \
GEN_VXFORM_UIMM(vspltb, 6, 8);
GEN_VXFORM_UIMM(vsplth, 6, 9);
GEN_VXFORM_UIMM(vspltw, 6, 10);
GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
static void gen_vsldoi(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rd;
    TCGv_i32 sh;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    sh = tcg_const_i32(VSH(ctx->opcode));
    gen_helper_vsldoi(rd, ra, rb, sh);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rd);
    tcg_temp_free_i32(sh);
}
7726 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
7727 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
7729 TCGv_ptr ra, rb, rc, rd; \
7730 if (unlikely(!ctx->altivec_enabled)) { \
7731 gen_exception(ctx, POWERPC_EXCP_VPU); \
7734 ra = gen_avr_ptr(rA(ctx->opcode)); \
7735 rb = gen_avr_ptr(rB(ctx->opcode)); \
7736 rc = gen_avr_ptr(rC(ctx->opcode)); \
7737 rd = gen_avr_ptr(rD(ctx->opcode)); \
7738 if (Rc(ctx->opcode)) { \
7739 gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
7741 gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
7743 tcg_temp_free_ptr(ra); \
7744 tcg_temp_free_ptr(rb); \
7745 tcg_temp_free_ptr(rc); \
7746 tcg_temp_free_ptr(rd); \
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)

static void gen_vmladduhm(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rc, rd;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rc = gen_avr_ptr(rC(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_vmladduhm(rd, ra, rb, rc);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rc);
    tcg_temp_free_ptr(rd);
}

GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
GEN_VAFORM_PAIRED(vsel, vperm, 21)
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
#if defined(TARGET_PPC64)
static void gen_maddld(DisasContext *ctx)
{
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
    tcg_temp_free_i64(t1);
}

/* maddhd maddhdu */
static void gen_maddhd_maddhdu(DisasContext *ctx)
{
    TCGv_i64 lo = tcg_temp_new_i64();
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    if (Rc(ctx->opcode)) {
        tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        tcg_gen_movi_i64(t1, 0);
    } else {
        tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
    }
    tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
                     cpu_gpr[rC(ctx->opcode)], t1);
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(t1);
}
#endif /* defined(TARGET_PPC64) */
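
/*
 * Comment-only arithmetic sketch: maddhd/maddhdu return the high 64 bits
 * of a 128-bit multiply-add,
 *
 *     prod = (rA) * (rB)                  // 128 bits, signed or unsigned
 *     sum  = prod + sign_or_zero_extend(rC)
 *     rD   = sum >> 64
 *
 * The code above builds the 128-bit product as the (lo, hi) pair from
 * mulu2/muls2, extends rC into t1 (zero for the unsigned form, the sign
 * replica for the signed form), and lets add2 propagate the carry into
 * the high half, which is what lands in cpu_gpr[rD].
 */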
GEN_VXFORM_NOA(vclzb, 1, 28)
GEN_VXFORM_NOA(vclzh, 1, 29)
GEN_VXFORM_NOA(vclzw, 1, 30)
GEN_VXFORM_NOA(vclzd, 1, 31)
GEN_VXFORM_NOA(vpopcntb, 1, 28)
GEN_VXFORM_NOA(vpopcnth, 1, 29)
GEN_VXFORM_NOA(vpopcntw, 1, 30)
GEN_VXFORM_NOA(vpopcntd, 1, 31)
GEN_VXFORM_DUAL(vclzb, PPC_NONE, PPC2_ALTIVEC_207, \
                vpopcntb, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vclzh, PPC_NONE, PPC2_ALTIVEC_207, \
                vpopcnth, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vclzw, PPC_NONE, PPC2_ALTIVEC_207, \
                vpopcntw, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vclzd, PPC_NONE, PPC2_ALTIVEC_207, \
                vpopcntd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM(vbpermq, 6, 21);
GEN_VXFORM_NOA(vgbbd, 6, 20);
GEN_VXFORM(vpmsumb, 4, 16)
GEN_VXFORM(vpmsumh, 4, 17)
GEN_VXFORM(vpmsumw, 4, 18)
GEN_VXFORM(vpmsumd, 4, 19)
#define GEN_BCD(op)                                 \
static void gen_##op(DisasContext *ctx)             \
{                                                   \
    TCGv_ptr ra, rb, rd;                            \
    TCGv_i32 ps;                                    \
                                                    \
    if (unlikely(!ctx->altivec_enabled)) {          \
        gen_exception(ctx, POWERPC_EXCP_VPU);       \
        return;                                     \
    }                                               \
                                                    \
    ra = gen_avr_ptr(rA(ctx->opcode));              \
    rb = gen_avr_ptr(rB(ctx->opcode));              \
    rd = gen_avr_ptr(rD(ctx->opcode));              \
                                                    \
    ps = tcg_const_i32((ctx->opcode & 0x200) != 0); \
                                                    \
    gen_helper_##op(cpu_crf[6], rd, ra, rb, ps);    \
                                                    \
    tcg_temp_free_ptr(ra);                          \
    tcg_temp_free_ptr(rb);                          \
    tcg_temp_free_ptr(rd);                          \
    tcg_temp_free_i32(ps);                          \
}

GEN_BCD(bcdadd)
GEN_BCD(bcdsub)

GEN_VXFORM_DUAL(vsububm, PPC_ALTIVEC, PPC_NONE, \
                bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vsububs, PPC_ALTIVEC, PPC_NONE, \
                bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vsubuhm, PPC_ALTIVEC, PPC_NONE, \
                bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vsubuhs, PPC_ALTIVEC, PPC_NONE, \
                bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
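
/*
 * Note / sketch: bcdadd. and bcdsub. reuse the VX opcode space of the
 * vsubu*m/vsubu*s forms above, which is why they only appear through
 * GEN_VXFORM_DUAL.  The PS ("preferred sign") control picked up by
 * GEN_BCD is simply opcode bit 0x200:
 *
 *     ps = tcg_const_i32((ctx->opcode & 0x200) != 0);
 *     gen_helper_bcdadd(cpu_crf[6], rd, ra, rb, ps);   // CR6 gets the flags
 *
 * so the helper receives PS as a plain 0/1 value and reports the result
 * class in CR field 6.
 */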
static void gen_vsbox(DisasContext *ctx)
{
    TCGv_ptr ra, rd;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_vsbox(rd, ra);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rd);
}
GEN_VXFORM(vcipher, 4, 20)
GEN_VXFORM(vcipherlast, 4, 20)
GEN_VXFORM(vncipher, 4, 21)
GEN_VXFORM(vncipherlast, 4, 21)

GEN_VXFORM_DUAL(vcipher, PPC_NONE, PPC2_ALTIVEC_207,
                vcipherlast, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vncipher, PPC_NONE, PPC2_ALTIVEC_207,
                vncipherlast, PPC_NONE, PPC2_ALTIVEC_207)
7893 #define VSHASIGMA(op) \
7894 static void gen_##op(DisasContext *ctx) \
7898 if (unlikely(!ctx->altivec_enabled)) { \
7899 gen_exception(ctx, POWERPC_EXCP_VPU); \
7902 ra = gen_avr_ptr(rA(ctx->opcode)); \
7903 rd = gen_avr_ptr(rD(ctx->opcode)); \
7904 st_six = tcg_const_i32(rB(ctx->opcode)); \
7905 gen_helper_##op(rd, ra, st_six); \
7906 tcg_temp_free_ptr(ra); \
7907 tcg_temp_free_ptr(rd); \
7908 tcg_temp_free_i32(st_six); \
VSHASIGMA(vshasigmaw)
VSHASIGMA(vshasigmad)

GEN_VXFORM3(vpermxor, 22, 0xFF)
GEN_VXFORM_DUAL(vsldoi, PPC_ALTIVEC, PPC_NONE,
                vpermxor, PPC_NONE, PPC2_ALTIVEC_207)
/*** VSX extension ***/

static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}
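
/*
 * Layout note (informal): the 64 VSX registers are not separate state.
 * VSR[0..31] overlay the FPRs in their high doubleword (with the low
 * doubleword in cpu_vsr[]), and VSR[32..63] are the Altivec registers,
 * so for example
 *
 *     cpu_vsrh(3)  == cpu_fpr[3]        cpu_vsrl(3)  == cpu_vsr[3]
 *     cpu_vsrh(40) == cpu_avrh[8]       cpu_vsrl(40) == cpu_avrl[8]
 *
 * which is why the VSX code below freely mixes these accessors with the
 * FP and Altivec state.
 */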
7938 #define VSX_LOAD_SCALAR(name, operation) \
7939 static void gen_##name(DisasContext *ctx) \
7942 if (unlikely(!ctx->vsx_enabled)) { \
7943 gen_exception(ctx, POWERPC_EXCP_VSXU); \
7946 gen_set_access_type(ctx, ACCESS_INT); \
7947 EA = tcg_temp_new(); \
7948 gen_addr_reg_index(ctx, EA); \
7949 gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
7950 /* NOTE: cpu_vsrl is undefined */ \
7951 tcg_temp_free(EA); \
VSX_LOAD_SCALAR(lxsdx, ld64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
7959 static void gen_lxvd2x(DisasContext
*ctx
)
7962 if (unlikely(!ctx
->vsx_enabled
)) {
7963 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
7966 gen_set_access_type(ctx
, ACCESS_INT
);
7967 EA
= tcg_temp_new();
7968 gen_addr_reg_index(ctx
, EA
);
7969 gen_qemu_ld64(ctx
, cpu_vsrh(xT(ctx
->opcode
)), EA
);
7970 tcg_gen_addi_tl(EA
, EA
, 8);
7971 gen_qemu_ld64(ctx
, cpu_vsrl(xT(ctx
->opcode
)), EA
);
7975 static void gen_lxvdsx(DisasContext
*ctx
)
7978 if (unlikely(!ctx
->vsx_enabled
)) {
7979 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
7982 gen_set_access_type(ctx
, ACCESS_INT
);
7983 EA
= tcg_temp_new();
7984 gen_addr_reg_index(ctx
, EA
);
7985 gen_qemu_ld64(ctx
, cpu_vsrh(xT(ctx
->opcode
)), EA
);
7986 tcg_gen_mov_i64(cpu_vsrl(xT(ctx
->opcode
)), cpu_vsrh(xT(ctx
->opcode
)));
7990 static void gen_lxvw4x(DisasContext
*ctx
)
7994 TCGv_i64 xth
= cpu_vsrh(xT(ctx
->opcode
));
7995 TCGv_i64 xtl
= cpu_vsrl(xT(ctx
->opcode
));
7996 if (unlikely(!ctx
->vsx_enabled
)) {
7997 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
8000 gen_set_access_type(ctx
, ACCESS_INT
);
8001 EA
= tcg_temp_new();
8002 tmp
= tcg_temp_new_i64();
8004 gen_addr_reg_index(ctx
, EA
);
8005 gen_qemu_ld32u_i64(ctx
, tmp
, EA
);
8006 tcg_gen_addi_tl(EA
, EA
, 4);
8007 gen_qemu_ld32u_i64(ctx
, xth
, EA
);
8008 tcg_gen_deposit_i64(xth
, xth
, tmp
, 32, 32);
8010 tcg_gen_addi_tl(EA
, EA
, 4);
8011 gen_qemu_ld32u_i64(ctx
, tmp
, EA
);
8012 tcg_gen_addi_tl(EA
, EA
, 4);
8013 gen_qemu_ld32u_i64(ctx
, xtl
, EA
);
8014 tcg_gen_deposit_i64(xtl
, xtl
, tmp
, 32, 32);
8017 tcg_temp_free_i64(tmp
);
8020 #define VSX_STORE_SCALAR(name, operation) \
8021 static void gen_##name(DisasContext *ctx) \
8024 if (unlikely(!ctx->vsx_enabled)) { \
8025 gen_exception(ctx, POWERPC_EXCP_VSXU); \
8028 gen_set_access_type(ctx, ACCESS_INT); \
8029 EA = tcg_temp_new(); \
8030 gen_addr_reg_index(ctx, EA); \
8031 gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
8032 tcg_temp_free(EA); \
VSX_STORE_SCALAR(stxsdx, st64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)
8039 static void gen_stxvd2x(DisasContext
*ctx
)
8042 if (unlikely(!ctx
->vsx_enabled
)) {
8043 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
8046 gen_set_access_type(ctx
, ACCESS_INT
);
8047 EA
= tcg_temp_new();
8048 gen_addr_reg_index(ctx
, EA
);
8049 gen_qemu_st64(ctx
, cpu_vsrh(xS(ctx
->opcode
)), EA
);
8050 tcg_gen_addi_tl(EA
, EA
, 8);
8051 gen_qemu_st64(ctx
, cpu_vsrl(xS(ctx
->opcode
)), EA
);
8055 static void gen_stxvw4x(DisasContext
*ctx
)
8059 if (unlikely(!ctx
->vsx_enabled
)) {
8060 gen_exception(ctx
, POWERPC_EXCP_VSXU
);
8063 gen_set_access_type(ctx
, ACCESS_INT
);
8064 EA
= tcg_temp_new();
8065 gen_addr_reg_index(ctx
, EA
);
8066 tmp
= tcg_temp_new_i64();
8068 tcg_gen_shri_i64(tmp
, cpu_vsrh(xS(ctx
->opcode
)), 32);
8069 gen_qemu_st32_i64(ctx
, tmp
, EA
);
8070 tcg_gen_addi_tl(EA
, EA
, 4);
8071 gen_qemu_st32_i64(ctx
, cpu_vsrh(xS(ctx
->opcode
)), EA
);
8073 tcg_gen_shri_i64(tmp
, cpu_vsrl(xS(ctx
->opcode
)), 32);
8074 tcg_gen_addi_tl(EA
, EA
, 4);
8075 gen_qemu_st32_i64(ctx
, tmp
, EA
);
8076 tcg_gen_addi_tl(EA
, EA
, 4);
8077 gen_qemu_st32_i64(ctx
, cpu_vsrl(xS(ctx
->opcode
)), EA
);
8080 tcg_temp_free_i64(tmp
);
8083 #define MV_VSRW(name, tcgop1, tcgop2, target, source) \
8084 static void gen_##name(DisasContext *ctx) \
8086 if (xS(ctx->opcode) < 32) { \
8087 if (unlikely(!ctx->fpu_enabled)) { \
8088 gen_exception(ctx, POWERPC_EXCP_FPU); \
8092 if (unlikely(!ctx->altivec_enabled)) { \
8093 gen_exception(ctx, POWERPC_EXCP_VPU); \
8097 TCGv_i64 tmp = tcg_temp_new_i64(); \
8098 tcg_gen_##tcgop1(tmp, source); \
8099 tcg_gen_##tcgop2(target, tmp); \
8100 tcg_temp_free_i64(tmp); \
MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
8111 #if defined(TARGET_PPC64)
8112 #define MV_VSRD(name, target, source) \
8113 static void gen_##name(DisasContext *ctx) \
8115 if (xS(ctx->opcode) < 32) { \
8116 if (unlikely(!ctx->fpu_enabled)) { \
8117 gen_exception(ctx, POWERPC_EXCP_FPU); \
8121 if (unlikely(!ctx->altivec_enabled)) { \
8122 gen_exception(ctx, POWERPC_EXCP_VPU); \
8126 tcg_gen_mov_i64(target, source); \
MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

#endif /* defined(TARGET_PPC64) */
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}
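
/*
 * Comment-only decode sketch: the 2-bit DM field of xxpermdi selects one
 * doubleword from each source,
 *
 *     DM bit 1 (mask 2): 0 -> xT.hi = xA.hi,  1 -> xT.hi = xA.lo
 *     DM bit 0 (mask 1): 0 -> xT.lo = xB.hi,  1 -> xT.lo = xB.lo
 *
 * The temporaries in the first branch only exist to keep the result
 * correct when xT aliases xA or xB.
 */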
8182 #define SGN_MASK_DP 0x8000000000000000ull
8183 #define SGN_MASK_SP 0x8000000080000000ull
8185 #define VSX_SCALAR_MOVE(name, op, sgn_mask) \
8186 static void glue(gen_, name)(DisasContext * ctx) \
8189 if (unlikely(!ctx->vsx_enabled)) { \
8190 gen_exception(ctx, POWERPC_EXCP_VSXU); \
8193 xb = tcg_temp_new_i64(); \
8194 sgm = tcg_temp_new_i64(); \
8195 tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode))); \
8196 tcg_gen_movi_i64(sgm, sgn_mask); \
8199 tcg_gen_andc_i64(xb, xb, sgm); \
8203 tcg_gen_or_i64(xb, xb, sgm); \
8207 tcg_gen_xor_i64(xb, xb, sgm); \
8211 TCGv_i64 xa = tcg_temp_new_i64(); \
8212 tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode))); \
8213 tcg_gen_and_i64(xa, xa, sgm); \
8214 tcg_gen_andc_i64(xb, xb, sgm); \
8215 tcg_gen_or_i64(xb, xb, xa); \
8216 tcg_temp_free_i64(xa); \
8220 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb); \
8221 tcg_temp_free_i64(xb); \
8222 tcg_temp_free_i64(sgm); \
VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
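
/*
 * Informal sketch of the four OP_* cases the move macros dispatch on
 * (VSX_VECTOR_MOVE below applies the same pattern once per doubleword):
 *
 *     OP_ABS:    xb &= ~sgn_mask;            // clear the sign bit(s)
 *     OP_NABS:   xb |=  sgn_mask;            // force the sign bit(s)
 *     OP_NEG:    xb ^=  sgn_mask;            // flip the sign bit(s)
 *     OP_CPSGN:  xb = (xb & ~sgn_mask) | (xa & sgn_mask);
 *
 * with sgn_mask = SGN_MASK_DP for one double or SGN_MASK_SP for the two
 * packed singles in each 64-bit half.
 */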
8230 #define VSX_VECTOR_MOVE(name, op, sgn_mask) \
8231 static void glue(gen_, name)(DisasContext * ctx) \
8233 TCGv_i64 xbh, xbl, sgm; \
8234 if (unlikely(!ctx->vsx_enabled)) { \
8235 gen_exception(ctx, POWERPC_EXCP_VSXU); \
8238 xbh = tcg_temp_new_i64(); \
8239 xbl = tcg_temp_new_i64(); \
8240 sgm = tcg_temp_new_i64(); \
8241 tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode))); \
8242 tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode))); \
8243 tcg_gen_movi_i64(sgm, sgn_mask); \
8246 tcg_gen_andc_i64(xbh, xbh, sgm); \
8247 tcg_gen_andc_i64(xbl, xbl, sgm); \
8251 tcg_gen_or_i64(xbh, xbh, sgm); \
8252 tcg_gen_or_i64(xbl, xbl, sgm); \
8256 tcg_gen_xor_i64(xbh, xbh, sgm); \
8257 tcg_gen_xor_i64(xbl, xbl, sgm); \
8261 TCGv_i64 xah = tcg_temp_new_i64(); \
8262 TCGv_i64 xal = tcg_temp_new_i64(); \
8263 tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
8264 tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
8265 tcg_gen_and_i64(xah, xah, sgm); \
8266 tcg_gen_and_i64(xal, xal, sgm); \
8267 tcg_gen_andc_i64(xbh, xbh, sgm); \
8268 tcg_gen_andc_i64(xbl, xbl, sgm); \
8269 tcg_gen_or_i64(xbh, xbh, xah); \
8270 tcg_gen_or_i64(xbl, xbl, xal); \
8271 tcg_temp_free_i64(xah); \
8272 tcg_temp_free_i64(xal); \
8276 tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh); \
8277 tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl); \
8278 tcg_temp_free_i64(xbh); \
8279 tcg_temp_free_i64(xbl); \
8280 tcg_temp_free_i64(sgm); \
VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
8292 #define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \
8293 static void gen_##name(DisasContext * ctx) \
8296 if (unlikely(!ctx->vsx_enabled)) { \
8297 gen_exception(ctx, POWERPC_EXCP_VSXU); \
    /* NIP cannot be restored if the memory exception comes from a helper */ \
8301 gen_update_nip(ctx, ctx->nip - 4); \
8302 opc = tcg_const_i32(ctx->opcode); \
8303 gen_helper_##name(cpu_env, opc); \
8304 tcg_temp_free_i32(opc); \
8307 #define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
8308 static void gen_##name(DisasContext * ctx) \
8310 if (unlikely(!ctx->vsx_enabled)) { \
8311 gen_exception(ctx, POWERPC_EXCP_VSXU); \
8314 /* NIP cannot be restored if the exception comes */ \
8315 /* from a helper. */ \
8316 gen_update_nip(ctx, ctx->nip - 4); \
8318 gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env, \
8319 cpu_vsrh(xB(ctx->opcode))); \
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
8452 #define VSX_LOGICAL(name, tcg_op) \
8453 static void glue(gen_, name)(DisasContext * ctx) \
8455 if (unlikely(!ctx->vsx_enabled)) { \
8456 gen_exception(ctx, POWERPC_EXCP_VSXU); \
8459 tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
8460 cpu_vsrh(xB(ctx->opcode))); \
8461 tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
8462 cpu_vsrl(xB(ctx->opcode))); \
VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)
8474 #define VSX_XXMRG(name, high) \
8475 static void glue(gen_, name)(DisasContext * ctx) \
8477 TCGv_i64 a0, a1, b0, b1; \
8478 if (unlikely(!ctx->vsx_enabled)) { \
8479 gen_exception(ctx, POWERPC_EXCP_VSXU); \
8482 a0 = tcg_temp_new_i64(); \
8483 a1 = tcg_temp_new_i64(); \
8484 b0 = tcg_temp_new_i64(); \
8485 b1 = tcg_temp_new_i64(); \
8487 tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
8488 tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
8489 tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
8490 tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
8492 tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
8493 tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
8494 tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
8495 tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
8497 tcg_gen_shri_i64(a0, a0, 32); \
8498 tcg_gen_shri_i64(b0, b0, 32); \
8499 tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)), \
8501 tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), \
8503 tcg_temp_free_i64(a0); \
8504 tcg_temp_free_i64(a1); \
8505 tcg_temp_free_i64(b0); \
8506 tcg_temp_free_i64(b1); \
VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
static void gen_xxsel(DisasContext *ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}
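
/*
 * Identity used above (informal): xxsel is a pure bitwise select,
 *
 *     xT = (xA & ~xC) | (xB & xC)
 *
 * i.e. each result bit comes from xB where the mask xC is 1 and from xA
 * where it is 0, applied independently to the high and low doublewords.
 */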
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}
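
/*
 * Decode note (informal): UIM selects which of the four source words is
 * splatted; bit 1 picks the doubleword (high or low half of xB), bit 0
 * picks the word within it.  For example, for UIM = 2 the selected word
 * is the upper word of xB.lo:
 *
 *     b     = xB.lo >> 32;         // UIM & 1 == 0 -> upper word
 *     xT.hi = (b << 32) | b;       // replicate into one doubleword
 *     xT.lo = xT.hi;               // and into the other
 */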
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
/*** Decimal Floating Point ***/

static inline TCGv_ptr gen_fprp_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, fpr[reg]));
    return r;
}
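/* The DFP helpers operate on the FPR (or FPR pair, for the quad forms)
 * through host pointers into env->fpr[] built by gen_fprp_ptr() above,
 * rather than on TCG values.  The macro families below differ only in the
 * operand shape: target/source registers, a CR field, or immediate fields
 * such as DCM and RMC.
 */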
#define GEN_DFP_T_A_B_Rc(name) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_ptr rd, ra, rb; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_update_nip(ctx, ctx->nip - 4); \
    rd = gen_fprp_ptr(rD(ctx->opcode)); \
    ra = gen_fprp_ptr(rA(ctx->opcode)); \
    rb = gen_fprp_ptr(rB(ctx->opcode)); \
    gen_helper_##name(cpu_env, rd, ra, rb); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_ptr(rd); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
}

#define GEN_DFP_BF_A_B(name) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_update_nip(ctx, ctx->nip - 4); \
    ra = gen_fprp_ptr(rA(ctx->opcode)); \
    rb = gen_fprp_ptr(rB(ctx->opcode)); \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
                      cpu_env, ra, rb); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
}

#define GEN_DFP_BF_A_DCM(name) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_ptr ra; \
    TCGv_i32 dcm; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_update_nip(ctx, ctx->nip - 4); \
    ra = gen_fprp_ptr(rA(ctx->opcode)); \
    dcm = tcg_const_i32(DCM(ctx->opcode)); \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], \
                      cpu_env, ra, dcm); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_i32(dcm); \
}

#define GEN_DFP_T_B_U32_U32_Rc(name, u32f1, u32f2) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_ptr rt, rb; \
    TCGv_i32 u32_1, u32_2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_update_nip(ctx, ctx->nip - 4); \
    rt = gen_fprp_ptr(rD(ctx->opcode)); \
    rb = gen_fprp_ptr(rB(ctx->opcode)); \
    u32_1 = tcg_const_i32(u32f1(ctx->opcode)); \
    u32_2 = tcg_const_i32(u32f2(ctx->opcode)); \
    gen_helper_##name(cpu_env, rt, rb, u32_1, u32_2); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_ptr(rt); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_i32(u32_1); \
    tcg_temp_free_i32(u32_2); \
}

#define GEN_DFP_T_A_B_I32_Rc(name, i32fld) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_ptr rt, ra, rb; \
    TCGv_i32 i32; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_update_nip(ctx, ctx->nip - 4); \
    rt = gen_fprp_ptr(rD(ctx->opcode)); \
    ra = gen_fprp_ptr(rA(ctx->opcode)); \
    rb = gen_fprp_ptr(rB(ctx->opcode)); \
    i32 = tcg_const_i32(i32fld(ctx->opcode)); \
    gen_helper_##name(cpu_env, rt, ra, rb, i32); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_ptr(rt); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_i32(i32); \
}

#define GEN_DFP_T_B_Rc(name) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_ptr rt, rb; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_update_nip(ctx, ctx->nip - 4); \
    rt = gen_fprp_ptr(rD(ctx->opcode)); \
    rb = gen_fprp_ptr(rB(ctx->opcode)); \
    gen_helper_##name(cpu_env, rt, rb); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_ptr(rt); \
    tcg_temp_free_ptr(rb); \
}

#define GEN_DFP_T_FPR_I32_Rc(name, fprfld, i32fld) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_ptr rt, rs; \
    TCGv_i32 i32; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_update_nip(ctx, ctx->nip - 4); \
    rt = gen_fprp_ptr(rD(ctx->opcode)); \
    rs = gen_fprp_ptr(fprfld(ctx->opcode)); \
    i32 = tcg_const_i32(i32fld(ctx->opcode)); \
    gen_helper_##name(cpu_env, rt, rs, i32); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_ptr(rt); \
    tcg_temp_free_ptr(rs); \
    tcg_temp_free_i32(i32); \
}
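/* For reference, GEN_DFP_T_A_B_Rc(dadd) below expands to a gen_dadd() that
 * checks ctx->fpu_enabled, builds FPR pointers for rD/rA/rB, calls
 * gen_helper_dadd(cpu_env, rd, ra, rb) and, when Rc is set, copies the
 * FPSCR status into CR1 via gen_set_cr1_from_fpscr().
 */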
GEN_DFP_T_A_B_Rc(dadd)
GEN_DFP_T_A_B_Rc(daddq)
GEN_DFP_T_A_B_Rc(dsub)
GEN_DFP_T_A_B_Rc(dsubq)
GEN_DFP_T_A_B_Rc(dmul)
GEN_DFP_T_A_B_Rc(dmulq)
GEN_DFP_T_A_B_Rc(ddiv)
GEN_DFP_T_A_B_Rc(ddivq)
GEN_DFP_BF_A_B(dcmpu)
GEN_DFP_BF_A_B(dcmpuq)
GEN_DFP_BF_A_B(dcmpo)
GEN_DFP_BF_A_B(dcmpoq)
GEN_DFP_BF_A_DCM(dtstdc)
GEN_DFP_BF_A_DCM(dtstdcq)
GEN_DFP_BF_A_DCM(dtstdg)
GEN_DFP_BF_A_DCM(dtstdgq)
GEN_DFP_BF_A_B(dtstex)
GEN_DFP_BF_A_B(dtstexq)
GEN_DFP_BF_A_B(dtstsf)
GEN_DFP_BF_A_B(dtstsfq)
GEN_DFP_T_B_U32_U32_Rc(dquai, SIMM5, RMC)
GEN_DFP_T_B_U32_U32_Rc(dquaiq, SIMM5, RMC)
GEN_DFP_T_A_B_I32_Rc(dqua, RMC)
GEN_DFP_T_A_B_I32_Rc(dquaq, RMC)
GEN_DFP_T_A_B_I32_Rc(drrnd, RMC)
GEN_DFP_T_A_B_I32_Rc(drrndq, RMC)
GEN_DFP_T_B_U32_U32_Rc(drintx, FPW, RMC)
GEN_DFP_T_B_U32_U32_Rc(drintxq, FPW, RMC)
GEN_DFP_T_B_U32_U32_Rc(drintn, FPW, RMC)
GEN_DFP_T_B_U32_U32_Rc(drintnq, FPW, RMC)
GEN_DFP_T_B_Rc(dctdp)
GEN_DFP_T_B_Rc(dctqpq)
GEN_DFP_T_B_Rc(drsp)
GEN_DFP_T_B_Rc(drdpq)
GEN_DFP_T_B_Rc(dcffix)
GEN_DFP_T_B_Rc(dcffixq)
GEN_DFP_T_B_Rc(dctfix)
GEN_DFP_T_B_Rc(dctfixq)
GEN_DFP_T_FPR_I32_Rc(ddedpd, rB, SP)
GEN_DFP_T_FPR_I32_Rc(ddedpdq, rB, SP)
GEN_DFP_T_FPR_I32_Rc(denbcd, rB, SP)
GEN_DFP_T_FPR_I32_Rc(denbcdq, rB, SP)
GEN_DFP_T_B_Rc(dxex)
GEN_DFP_T_B_Rc(dxexq)
GEN_DFP_T_A_B_Rc(diex)
GEN_DFP_T_A_B_Rc(diexq)
GEN_DFP_T_FPR_I32_Rc(dscli, rA, DCM)
GEN_DFP_T_FPR_I32_Rc(dscliq, rA, DCM)
GEN_DFP_T_FPR_I32_Rc(dscri, rA, DCM)
GEN_DFP_T_FPR_I32_Rc(dscriq, rA, DCM)
/*** SPE extension ***/
/* Register moves */

static inline void gen_evmra(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    TCGv_i64 tmp = tcg_temp_new_i64();

    /* tmp := rA_lo + rA_hi << 32 */
    tcg_gen_concat_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);

    /* spe_acc := tmp */
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
    tcg_temp_free_i64(tmp);

    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
}

static inline void gen_load_gpr64(TCGv_i64 t, int reg)
{
    tcg_gen_concat_tl_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
}

static inline void gen_store_gpr64(int reg, TCGv_i64 t)
{
    tcg_gen_extr_i64_tl(cpu_gpr[reg], cpu_gprh[reg], t);
}
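/* SPE treats each GPR as 64 bits wide: the low 32 bits live in cpu_gpr[reg]
 * and the upper 32 bits in cpu_gprh[reg].  gen_load_gpr64() and
 * gen_store_gpr64() pack and unpack that pair into a single TCGv_i64 for
 * the 64-bit operations below.
 */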
#define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
{ \
    if (Rc(ctx->opcode)) \
        gen_##name1(ctx); \
    else \
        gen_##name0(ctx); \
}

/* Handler for undefined SPE opcodes */
static inline void gen_speundef(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

#define GEN_SPEOP_LOGIC2(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
           cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
           cpu_gprh[rB(ctx->opcode)]); \
}

GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);

/* SPE logic immediate */
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_opi(t0, t0, rB(ctx->opcode)); \
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); \
    tcg_gen_trunc_tl_i32(t0, cpu_gprh[rA(ctx->opcode)]); \
    tcg_opi(t0, t0, rB(ctx->opcode)); \
    tcg_gen_extu_i32_tl(cpu_gprh[rD(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0); \
}
GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
/* SPE arithmetic */
#define GEN_SPEOP_ARITH1(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_op(t0, t0); \
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); \
    tcg_gen_trunc_tl_i32(t0, cpu_gprh[rA(ctx->opcode)]); \
    tcg_op(t0, t0); \
    tcg_gen_extu_i32_tl(cpu_gprh[rD(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0); \
}

static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
    tcg_gen_neg_i32(ret, arg1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_i32(ret, arg1);
    gen_set_label(l2);
}

GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
{
    tcg_gen_addi_i32(ret, arg1, 0x8000);
    tcg_gen_ext16u_i32(ret, ret);
}
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
#define GEN_SPEOP_ARITH2(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0, t1; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i32(); \
    t1 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(t0, t0, t1); \
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); \
    tcg_gen_trunc_tl_i32(t0, cpu_gprh[rA(ctx->opcode)]); \
    tcg_gen_trunc_tl_i32(t1, cpu_gprh[rB(ctx->opcode)]); \
    tcg_op(t0, t0, t1); \
    tcg_gen_extu_i32_tl(cpu_gprh[rD(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
}

static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();

    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shr_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();

    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_sar_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();

    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shl_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, arg2, 0x1F);
    tcg_gen_rotl_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
static inline void gen_evmergehi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
}
GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_sub_i32(ret, arg2, arg1);
}
GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);

/* SPE arithmetic immediate */
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(t0, t0, rA(ctx->opcode)); \
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); \
    tcg_gen_trunc_tl_i32(t0, cpu_gprh[rB(ctx->opcode)]); \
    tcg_op(t0, t0, rA(ctx->opcode)); \
    tcg_gen_extu_i32_tl(cpu_gprh[rD(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0); \
}
GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);

/* SPE comparison */
#define GEN_SPEOP_COMP(name, tcg_cond) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    TCGLabel *l1 = gen_new_label(); \
    TCGLabel *l2 = gen_new_label(); \
    TCGLabel *l3 = gen_new_label(); \
    TCGLabel *l4 = gen_new_label(); \
    tcg_gen_ext32s_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_ext32s_tl(cpu_gpr[rB(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
    tcg_gen_ext32s_tl(cpu_gprh[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
    tcg_gen_ext32s_tl(cpu_gprh[rB(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]); \
    tcg_gen_brcond_tl(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
                      cpu_gpr[rB(ctx->opcode)], l1); \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
    tcg_gen_br(l2); \
    gen_set_label(l1); \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
                     CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
    gen_set_label(l2); \
    tcg_gen_brcond_tl(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
                      cpu_gprh[rB(ctx->opcode)], l3); \
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                     ~(CRF_CH | CRF_CH_AND_CL)); \
    tcg_gen_br(l4); \
    gen_set_label(l3); \
    tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                    CRF_CH | CRF_CH_OR_CL); \
    gen_set_label(l4); \
}
GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
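/* The ev* comparisons above compare both 32-bit halves of rA and rB and
 * leave a 4-bit result in the target CR field: one bit for the high-word
 * compare (CRF_CH), one for the low-word compare (CRF_CL), plus the OR and
 * the AND of the two (CRF_CH_OR_CL, CRF_CH_AND_CL).
 */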
static inline void gen_brinc(DisasContext *ctx)
{
    /* Note: brinc is usable even if SPE is disabled */
    gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
static inline void gen_evmergelo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
static inline void gen_evmergehilo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
}
static inline void gen_evmergelohi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    if (rD(ctx->opcode) == rA(ctx->opcode)) {
        TCGv tmp = tcg_temp_new();
        tcg_gen_mov_tl(tmp, cpu_gpr[rA(ctx->opcode)]);
        tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], tmp);
        tcg_temp_free(tmp);
    } else {
        tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    }
}
static inline void gen_evsplati(DisasContext *ctx)
{
    uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;

    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_tl(cpu_gprh[rD(ctx->opcode)], imm);
}
static inline void gen_evsplatfi(DisasContext *ctx)
{
    uint64_t imm = rA(ctx->opcode) << 27;

    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_tl(cpu_gprh[rD(ctx->opcode)], imm);
}
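/* evsplati sign-extends the 5-bit immediate (encoded in the rA field) into
 * both words of rD, while evsplatfi places it in the five most significant
 * bits of each word with the remaining bits cleared.
 */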
static inline void gen_evsel(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGLabel *l3 = gen_new_label();
    TCGLabel *l4 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();

    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
    gen_set_label(l2);
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l4);
    gen_set_label(l3);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    gen_set_label(l4);
    tcg_temp_free_i32(t0);
}

static void gen_evsel0(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel1(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel2(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel3(DisasContext *ctx)
{
    gen_evsel(ctx);
}
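/* evmwumi multiplies the low words of rA and rB as an unsigned
 * 32 x 32 -> 64-bit product and writes it to the full 64-bit rD; the *a
 * variant additionally copies the product into the accumulator, and the
 * *aa variant adds it to the accumulator and writes the sum back to rD.
 * The evmwsmi* forms below do the same with signed operands.
 */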
static inline void gen_evmwumi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB */
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(t0, t0);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_ext32u_i64(t1, t1);

    tcg_gen_mul_i64(t0, t0, t1);  /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0);  /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static inline void gen_evmwumia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    gen_evmwumi(ctx);  /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
    tcg_temp_free_i64(tmp);
}

static inline void gen_evmwumiaa(DisasContext *ctx)
{
    TCGv_i64 acc;
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    gen_evmwumi(ctx);  /* rD := rA * rB */

    acc = tcg_temp_new_i64();
    tmp = tcg_temp_new_i64();

    gen_load_gpr64(tmp, rD(ctx->opcode));

    tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    /* acc := tmp + acc */
    tcg_gen_add_i64(acc, acc, tmp);

    tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    gen_store_gpr64(rD(ctx->opcode), acc);

    tcg_temp_free_i64(acc);
    tcg_temp_free_i64(tmp);
}

static inline void gen_evmwsmi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB */
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(t0, t0);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_ext32s_i64(t1, t1);

    tcg_gen_mul_i64(t0, t0, t1);  /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0);  /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static inline void gen_evmwsmia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    gen_evmwsmi(ctx);  /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));

    tcg_temp_free_i64(tmp);
}

static inline void gen_evmwsmiaa(DisasContext *ctx)
{
    TCGv_i64 acc = tcg_temp_new_i64();
    TCGv_i64 tmp = tcg_temp_new_i64();

    gen_evmwsmi(ctx);  /* rD := rA * rB */

    acc = tcg_temp_new_i64();
    tmp = tcg_temp_new_i64();

    gen_load_gpr64(tmp, rD(ctx->opcode));

    tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    /* acc := tmp + acc */
    tcg_gen_add_i64(acc, acc, tmp);

    tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    gen_store_gpr64(rD(ctx->opcode), acc);

    tcg_temp_free_i64(acc);
    tcg_temp_free_i64(tmp);
}
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE); //
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE); //
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE);
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE); ////
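/* Each GEN_SPE() line above and below defines a gen_<name0>_<name1>()
 * dispatcher that calls gen_<name1>() when the low opcode bit (Rc) is set
 * and gen_<name0>() otherwise; the remaining arguments carry the extended
 * opcode numbers and the invalid-bit masks for the two forms.
 */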
/* SPE load and stores */
static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
{
    target_ulong uimm = rB(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        tcg_gen_movi_tl(EA, uimm << sh);
    } else {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    }
}
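/* The SPE d-form loads and stores encode a 5-bit unsigned offset in the rB
 * field; gen_addr_spe_imm_index() scales it by the access size (sh is log2
 * of the size, see the GEN_SPEOP_LDST instantiations below) and adds rA,
 * with rA == 0 meaning a zero base as usual.
 */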
static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    gen_qemu_ld64(ctx, t0, addr);
    gen_store_gpr64(rD(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
{
    gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
}

static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}

static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}

static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}

static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}

static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    tcg_temp_free(t0);
}

static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
{
    gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
}

static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
{
    gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
}

static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}

static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}

static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    gen_load_gpr64(t0, rS(ctx->opcode));
    gen_qemu_st64(ctx, t0, addr);
    tcg_temp_free_i64(t0);
}

static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
{
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}

static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}

static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
}

static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
{
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}

static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
{
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
}

static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
{
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
#define GEN_SPEOP_LDST(name, opc2, sh) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv t0; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    t0 = tcg_temp_new(); \
    if (Rc(ctx->opcode)) { \
        gen_addr_spe_imm_index(ctx, t0, sh); \
    } else { \
        gen_addr_reg_index(ctx, t0); \
    } \
    gen_op_##name(ctx, t0); \
    tcg_temp_free(t0); \
}

GEN_SPEOP_LDST(evldd, 0x00, 3);
GEN_SPEOP_LDST(evldw, 0x01, 3);
GEN_SPEOP_LDST(evldh, 0x02, 3);
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
GEN_SPEOP_LDST(evlwhe, 0x08, 2);
GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);

GEN_SPEOP_LDST(evstdd, 0x10, 3);
GEN_SPEOP_LDST(evstdw, 0x11, 3);
GEN_SPEOP_LDST(evstdh, 0x12, 3);
GEN_SPEOP_LDST(evstwhe, 0x18, 2);
GEN_SPEOP_LDST(evstwho, 0x1A, 2);
GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
/* Multiply and add - TODO */

GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);//
GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, 0x00000000, PPC_SPE);

GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
/*** SPE floating-point extension ***/
#define GEN_SPEFPUOP_CONV_32_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
    gen_helper_##name(t0, cpu_env, t0); \
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0); \
}
#define GEN_SPEFPUOP_CONV_32_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0 = tcg_temp_new_i64(); \
    TCGv_i32 t1 = tcg_temp_new_i32(); \
    gen_load_gpr64(t0, rB(ctx->opcode)); \
    gen_helper_##name(t1, cpu_env, t0); \
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i32(t1); \
}
#define GEN_SPEFPUOP_CONV_64_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0 = tcg_temp_new_i64(); \
    TCGv_i32 t1 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
    gen_helper_##name(t0, cpu_env, t1); \
    gen_store_gpr64(rD(ctx->opcode), t0); \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i32(t1); \
}
#define GEN_SPEFPUOP_CONV_64_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0 = tcg_temp_new_i64(); \
    gen_load_gpr64(t0, rB(ctx->opcode)); \
    gen_helper_##name(t0, cpu_env, t0); \
    gen_store_gpr64(rD(ctx->opcode), t0); \
    tcg_temp_free_i64(t0); \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0, t1; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i32(); \
    t1 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
    gen_helper_##name(t0, cpu_env, t0, t1); \
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0, t1; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_load_gpr64(t0, rA(ctx->opcode)); \
    gen_load_gpr64(t1, rB(ctx->opcode)); \
    gen_helper_##name(t0, cpu_env, t0, t1); \
    gen_store_gpr64(rD(ctx->opcode), t0); \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}
#define GEN_SPEFPUOP_COMP_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0, t1; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i32(); \
    t1 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
}
#define GEN_SPEFPUOP_COMP_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0, t1; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_load_gpr64(t0, rA(ctx->opcode)); \
    gen_load_gpr64(t1, rB(ctx->opcode)); \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}
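/* Naming convention for the SPE FP generator macros above: the first width
 * is the destination and the second the source.  The _32_32 forms work on
 * the low GPR word only (scalar single precision), the _64_64 forms on the
 * full 64-bit pair via gen_load_gpr64()/gen_store_gpr64() (vector single or
 * scalar double), and the COMP forms pass the result to a helper whose
 * return value is written to the target CR field.
 */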
/* Single precision floating-point vectors operations */

GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
GEN_SPEFPUOP_ARITH2_64_64(evfssub);
GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
static inline void gen_evfsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    ~0x80000000);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    ~0x80000000);
}
static inline void gen_evfsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   0x80000000);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                   0x80000000);
}
static inline void gen_evfsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    0x80000000);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    0x80000000);
}

GEN_SPEFPUOP_CONV_64_64(evfscfui);
GEN_SPEFPUOP_CONV_64_64(evfscfsi);
GEN_SPEFPUOP_CONV_64_64(evfscfuf);
GEN_SPEFPUOP_CONV_64_64(evfscfsf);
GEN_SPEFPUOP_CONV_64_64(evfsctui);
GEN_SPEFPUOP_CONV_64_64(evfsctsi);
GEN_SPEFPUOP_CONV_64_64(evfsctuf);
GEN_SPEFPUOP_CONV_64_64(evfsctsf);
GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
GEN_SPEFPUOP_CONV_64_64(evfsctsiz);

GEN_SPEFPUOP_COMP_64(evfscmpgt);
GEN_SPEFPUOP_COMP_64(evfscmplt);
GEN_SPEFPUOP_COMP_64(evfscmpeq);
GEN_SPEFPUOP_COMP_64(evfststgt);
GEN_SPEFPUOP_COMP_64(evfststlt);
GEN_SPEFPUOP_COMP_64(evfststeq);
/* Opcodes definitions */
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
/* Single precision floating-point operations */

GEN_SPEFPUOP_ARITH2_32_32(efsadd);
GEN_SPEFPUOP_ARITH2_32_32(efssub);
GEN_SPEFPUOP_ARITH2_32_32(efsmul);
GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
static inline void gen_efsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    (target_long)~0x80000000LL);
}
static inline void gen_efsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
}
static inline void gen_efsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0x80000000);
}

GEN_SPEFPUOP_CONV_32_32(efscfui);
GEN_SPEFPUOP_CONV_32_32(efscfsi);
GEN_SPEFPUOP_CONV_32_32(efscfuf);
GEN_SPEFPUOP_CONV_32_32(efscfsf);
GEN_SPEFPUOP_CONV_32_32(efsctui);
GEN_SPEFPUOP_CONV_32_32(efsctsi);
GEN_SPEFPUOP_CONV_32_32(efsctuf);
GEN_SPEFPUOP_CONV_32_32(efsctsf);
GEN_SPEFPUOP_CONV_32_32(efsctuiz);
GEN_SPEFPUOP_CONV_32_32(efsctsiz);
GEN_SPEFPUOP_CONV_32_64(efscfd);

GEN_SPEFPUOP_COMP_32(efscmpgt);
GEN_SPEFPUOP_COMP_32(efscmplt);
GEN_SPEFPUOP_COMP_32(efscmpeq);
GEN_SPEFPUOP_COMP_32(efststgt);
GEN_SPEFPUOP_COMP_32(efststlt);
GEN_SPEFPUOP_COMP_32(efststeq);
/* Opcodes definitions */
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
/* Double precision floating-point operations */

GEN_SPEFPUOP_ARITH2_64_64(efdadd);
GEN_SPEFPUOP_ARITH2_64_64(efdsub);
GEN_SPEFPUOP_ARITH2_64_64(efdmul);
GEN_SPEFPUOP_ARITH2_64_64(efddiv);
static inline void gen_efdabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    ~0x80000000);
}
static inline void gen_efdnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                   0x80000000);
}
static inline void gen_efdneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    0x80000000);
}

GEN_SPEFPUOP_CONV_64_32(efdcfui);
GEN_SPEFPUOP_CONV_64_32(efdcfsi);
GEN_SPEFPUOP_CONV_64_32(efdcfuf);
GEN_SPEFPUOP_CONV_64_32(efdcfsf);
GEN_SPEFPUOP_CONV_32_64(efdctui);
GEN_SPEFPUOP_CONV_32_64(efdctsi);
GEN_SPEFPUOP_CONV_32_64(efdctuf);
GEN_SPEFPUOP_CONV_32_64(efdctsf);
GEN_SPEFPUOP_CONV_32_64(efdctuiz);
GEN_SPEFPUOP_CONV_32_64(efdctsiz);
GEN_SPEFPUOP_CONV_64_32(efdcfs);
GEN_SPEFPUOP_CONV_64_64(efdcfuid);
GEN_SPEFPUOP_CONV_64_64(efdcfsid);
GEN_SPEFPUOP_CONV_64_64(efdctuidz);
GEN_SPEFPUOP_CONV_64_64(efdctsidz);

GEN_SPEFPUOP_COMP_64(efdcmpgt);
GEN_SPEFPUOP_COMP_64(efdcmplt);
GEN_SPEFPUOP_COMP_64(efdcmpeq);
GEN_SPEFPUOP_COMP_64(efdtstgt);
GEN_SPEFPUOP_COMP_64(efdtstlt);
GEN_SPEFPUOP_COMP_64(efdtsteq);
/* Opcodes definitions */
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE); //
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
static void gen_tbegin(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    gen_helper_tbegin(cpu_env);
}

#define GEN_TM_NOOP(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->tm_enabled)) { \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
        return; \
    } \
    /* Because tbegin always fails in QEMU, these user \
     * space instructions all have a simple implementation: \
     * \
     *     CR[0] = 0b0 || MSR[TS] || 0b0 \
     *           = 0b0 || 0b00    || 0b0 \
     */ \
    tcg_gen_movi_i32(cpu_crf[0], 0); \
}

GEN_TM_NOOP(tabort);
GEN_TM_NOOP(tabortwc);
GEN_TM_NOOP(tabortwci);
GEN_TM_NOOP(tabortdc);
GEN_TM_NOOP(tabortdci);

static void gen_tcheck(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    /* Because tbegin always fails, the tcheck implementation
     * is simple:
     *
     * CR[CRF] = TDOOMED || MSR[TS] || 0b0
     *         = 0b1 || 0b00 || 0b0
     */
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
}

#if defined(CONFIG_USER_ONLY)
#define GEN_TM_PRIV_NOOP(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \
}

#else

#define GEN_TM_PRIV_NOOP(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    CHK_SV; \
    if (unlikely(!ctx->tm_enabled)) { \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
        return; \
    } \
    /* Because tbegin always fails, the implementation is \
     * simple: \
     * \
     *   CR[0] = 0b0 || MSR[TS] || 0b0 \
     *         = 0b0 || 0b00 | 0b0 \
     */ \
    tcg_gen_movi_i32(cpu_crf[0], 0); \
}

GEN_TM_PRIV_NOOP(treclaim);
GEN_TM_PRIV_NOOP(trechkpt);
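/* Top-level opcode table.  Each GEN_HANDLER(name, opc1, opc2, opc3, inval,
 * type) entry maps the primary opcode (and, where used, the extended opcode
 * fields) to the gen_<name>() routine above, together with a mask of opcode
 * bits that must be zero and the instruction-class flag the CPU model must
 * advertise.  The GEN_HANDLER_E and GEN_HANDLER2 variants additionally take
 * a PPC2_* feature flag or a spelled-out mnemonic string.
 */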
10138 static opcode_t opcodes
[] = {
10139 GEN_HANDLER(invalid
, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE
),
10140 GEN_HANDLER(cmp
, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER
),
10141 GEN_HANDLER(cmpi
, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER
),
10142 GEN_HANDLER(cmpl
, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER
),
10143 GEN_HANDLER(cmpli
, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER
),
10144 #if defined(TARGET_PPC64)
10145 GEN_HANDLER_E(cmpeqb
, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE
, PPC2_ISA300
),
10147 GEN_HANDLER_E(cmpb
, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE
, PPC2_ISA205
),
10148 GEN_HANDLER_E(cmprb
, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE
, PPC2_ISA300
),
10149 GEN_HANDLER(isel
, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL
),
10150 GEN_HANDLER(addi
, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10151 GEN_HANDLER(addic
, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10152 GEN_HANDLER2(addic_
, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10153 GEN_HANDLER(addis
, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10154 GEN_HANDLER_E(addpcis
, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE
, PPC2_ISA300
),
10155 GEN_HANDLER(mulhw
, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER
),
10156 GEN_HANDLER(mulhwu
, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER
),
10157 GEN_HANDLER(mullw
, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER
),
10158 GEN_HANDLER(mullwo
, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER
),
10159 GEN_HANDLER(mulli
, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10160 #if defined(TARGET_PPC64)
10161 GEN_HANDLER(mulld
, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B
),
10163 GEN_HANDLER(neg
, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER
),
10164 GEN_HANDLER(nego
, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER
),
10165 GEN_HANDLER(subfic
, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10166 GEN_HANDLER2(andi_
, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10167 GEN_HANDLER2(andis_
, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10168 GEN_HANDLER(cntlzw
, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER
),
10169 GEN_HANDLER_E(cnttzw
, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE
, PPC2_ISA300
),
10170 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER
),
10171 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER
),
10172 GEN_HANDLER(ori
, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10173 GEN_HANDLER(oris
, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10174 GEN_HANDLER(xori
, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10175 GEN_HANDLER(xoris
, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10176 GEN_HANDLER(popcntb
, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB
),
10177 GEN_HANDLER(popcntw
, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD
),
10178 GEN_HANDLER_E(prtyw
, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE
, PPC2_ISA205
),
10179 #if defined(TARGET_PPC64)
10180 GEN_HANDLER(popcntd
, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD
),
10181 GEN_HANDLER(cntlzd
, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B
),
10182 GEN_HANDLER_E(cnttzd
, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE
, PPC2_ISA300
),
10183 GEN_HANDLER_E(prtyd
, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE
, PPC2_ISA205
),
10184 GEN_HANDLER_E(bpermd
, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE
, PPC2_PERM_ISA206
),
10186 GEN_HANDLER(rlwimi
, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10187 GEN_HANDLER(rlwinm
, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10188 GEN_HANDLER(rlwnm
, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER
),
10189 GEN_HANDLER(slw
, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER
),
10190 GEN_HANDLER(sraw
, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER
),
10191 GEN_HANDLER(srawi
, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER
),
10192 GEN_HANDLER(srw
, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER
),
10193 #if defined(TARGET_PPC64)
10194 GEN_HANDLER(sld
, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B
),
10195 GEN_HANDLER(srad
, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B
),
10196 GEN_HANDLER2(sradi0
, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B
),
10197 GEN_HANDLER2(sradi1
, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B
),
10198 GEN_HANDLER(srd
, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B
),
10200 GEN_HANDLER(frsqrtes
, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES
),
10201 GEN_HANDLER(fsqrt
, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT
),
10202 GEN_HANDLER(fsqrts
, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT
),
10203 GEN_HANDLER(fcmpo
, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT
),
10204 GEN_HANDLER(fcmpu
, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT
),
10205 GEN_HANDLER(fabs
, 0x3F, 0x08, 0x08, 0x001F0000, PPC_FLOAT
),
10206 GEN_HANDLER(fmr
, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT
),
10207 GEN_HANDLER(fnabs
, 0x3F, 0x08, 0x04, 0x001F0000, PPC_FLOAT
),
10208 GEN_HANDLER(fneg
, 0x3F, 0x08, 0x01, 0x001F0000, PPC_FLOAT
),
10209 GEN_HANDLER_E(fcpsgn
, 0x3F, 0x08, 0x00, 0x00000000, PPC_NONE
, PPC2_ISA205
),
10210 GEN_HANDLER_E(fmrgew
, 0x3F, 0x06, 0x1E, 0x00000001, PPC_NONE
, PPC2_VSX207
),
10211 GEN_HANDLER_E(fmrgow
, 0x3F, 0x06, 0x1A, 0x00000001, PPC_NONE
, PPC2_VSX207
),
10212 GEN_HANDLER(mcrfs
, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT
),
10213 GEN_HANDLER(mffs
, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT
),
10214 GEN_HANDLER(mtfsb0
, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT
),
10215 GEN_HANDLER(mtfsb1
, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT
),
10216 GEN_HANDLER(mtfsf
, 0x3F, 0x07, 0x16, 0x00000000, PPC_FLOAT
),
10217 GEN_HANDLER(mtfsfi
, 0x3F, 0x06, 0x04, 0x006e0800, PPC_FLOAT
),
10218 #if defined(TARGET_PPC64)
10219 GEN_HANDLER(ld
, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B
),
10220 GEN_HANDLER(lq
, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX
),
10221 GEN_HANDLER(std
, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B
),
GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
#if defined(TARGET_PPC64)
GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
#endif
GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0, PPC_NONE, PPC2_BCTAR_ISA207),
GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
#if defined(TARGET_PPC64)
GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
#endif
GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
#if defined(TARGET_PPC64)
GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
#if defined(TARGET_PPC64)
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
#endif
GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
#if defined(TARGET_PPC64)
GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
#endif
GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
/* XXX Those instructions will need to be handled differently for
 * different ISA versions */
GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
#if defined(TARGET_PPC64)
GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
#endif
GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
              PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
               PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
              PPC2_ISA300),
GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
#endif
GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
#undef GEN_INT_ARITH_ADD
#undef GEN_INT_ARITH_ADD_CONST
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
                                add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
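/*
 * With the code-generating variants #undef'd above, GEN_INT_ARITH_ADD and
 * GEN_INT_ARITH_ADD_CONST expand only to opcode-table rows of the form
 * GEN_HANDLER(name, opc1, opc2, opc3, inval, type): roughly, opc1 is the
 * primary opcode (0x1F here), opc2/opc3 select the extended opcode, inval
 * masks the encoding bits that must be zero, and type is the
 * instruction-class flag checked against the CPU model.  The
 * add_ca/compute_ca/compute_ov arguments are ignored in this table-only
 * expansion.
 */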
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
#undef GEN_INT_ARITH_DIVW
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
#if defined(TARGET_PPC64)
#undef GEN_INT_ARITH_DIVD
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),

GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),

#undef GEN_INT_ARITH_MUL_HELPER
#define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
#endif
#undef GEN_INT_ARITH_SUBF
#undef GEN_INT_ARITH_SUBF_CONST
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
                                 add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
#undef GEN_LOGICAL1
#undef GEN_LOGICAL2
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
#define GEN_LOGICAL1(name, tcg_op, opc, type) \
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
#endif
#if defined(TARGET_PPC64)
#undef GEN_PPC64_R2
#undef GEN_PPC64_R4
#define GEN_PPC64_R2(name, opc1, opc2) \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
             PPC_64B)
#define GEN_PPC64_R4(name, opc1, opc2) \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
             PPC_64B), \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
             PPC_64B), \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
             PPC_64B)
GEN_PPC64_R4(rldicl, 0x1E, 0x00),
GEN_PPC64_R4(rldicr, 0x1E, 0x02),
GEN_PPC64_R4(rldic, 0x1E, 0x04),
GEN_PPC64_R2(rldcl, 0x1E, 0x08),
GEN_PPC64_R2(rldcr, 0x1E, 0x09),
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
#endif
#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_BS
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
GEN_HANDLER_E(ftdiv, 0x3F, 0x00, 0x04, 1, PPC_NONE, PPC2_FP_TST_ISA206),
GEN_HANDLER_E(ftsqrt, 0x3F, 0x00, 0x05, 1, PPC_NONE, PPC2_FP_TST_ISA206),
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
GEN_HANDLER_E(fctiwu, 0x3F, 0x0E, 0x04, 0, PPC_NONE, PPC2_FP_CVT_ISA206),
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
GEN_HANDLER_E(fctiwuz, 0x3F, 0x0F, 0x04, 0, PPC_NONE, PPC2_FP_CVT_ISA206),
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
GEN_HANDLER_E(fcfid, 0x3F, 0x0E, 0x1A, 0x001F0000, PPC_NONE, PPC2_FP_CVT_S64),
GEN_HANDLER_E(fcfids, 0x3B, 0x0E, 0x1A, 0, PPC_NONE, PPC2_FP_CVT_ISA206),
GEN_HANDLER_E(fcfidu, 0x3F, 0x0E, 0x1E, 0, PPC_NONE, PPC2_FP_CVT_ISA206),
GEN_HANDLER_E(fcfidus, 0x3B, 0x0E, 0x1E, 0, PPC_NONE, PPC2_FP_CVT_ISA206),
GEN_HANDLER_E(fctid, 0x3F, 0x0E, 0x19, 0x001F0000, PPC_NONE, PPC2_FP_CVT_S64),
GEN_HANDLER_E(fctidu, 0x3F, 0x0E, 0x1D, 0, PPC_NONE, PPC2_FP_CVT_ISA206),
GEN_HANDLER_E(fctidz, 0x3F, 0x0F, 0x19, 0x001F0000, PPC_NONE, PPC2_FP_CVT_S64),
GEN_HANDLER_E(fctiduz, 0x3F, 0x0F, 0x1D, 0, PPC_NONE, PPC2_FP_CVT_ISA206),
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
#define GEN_LD(name, ldop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDU(name, ldop, opc, type) \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUX(name, ldop, opc2, opc3, type) \
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
#define GEN_LDS(name, ldop, op, type) \
GEN_LD(name, ldop, op | 0x20, type) \
GEN_LDU(name, ldop, op | 0x21, type) \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
#if defined(TARGET_PPC64)
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)

/* HV/P7 and later only */
GEN_LDX_HVRM(ldcix, ld64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
#endif
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
#define GEN_ST(name, stop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STU(name, stop, opc, type) \
GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUX(name, stop, opc2, opc3, type) \
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
#define GEN_STS(name, stop, op, type) \
GEN_ST(name, stop, op | 0x20, type) \
GEN_STU(name, stop, op | 0x21, type) \
GEN_STUX(name, stop, 0x17, op | 0x01, type) \
GEN_STX(name, stop, 0x17, op | 0x00, type)
GEN_STS(stb, st8, 0x06, PPC_INTEGER)
GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
GEN_STS(stw, st32, 0x04, PPC_INTEGER)
#if defined(TARGET_PPC64)
GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
GEN_STX_HVRM(stdcix, st64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
#endif
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
#define GEN_LDF(name, ldop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUF(name, ldop, opc, type) \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUXF(name, ldop, opc, type) \
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
#define GEN_LDXF(name, ldop, opc2, opc3, type) \
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_LDFS(name, ldop, op, type) \
GEN_LDF(name, ldop, op | 0x20, type) \
GEN_LDUF(name, ldop, op | 0x21, type) \
GEN_LDUXF(name, ldop, op | 0x01, type) \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)

GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
GEN_HANDLER_E(lfiwax, 0x1f, 0x17, 0x1a, 0x00000001, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(lfiwzx, 0x1f, 0x17, 0x1b, 0x1, PPC_NONE, PPC2_FP_CVT_ISA206),
GEN_HANDLER_E(lfdp, 0x39, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(lfdpx, 0x1F, 0x17, 0x18, 0x00200001, PPC_NONE, PPC2_ISA205),
#define GEN_STF(name, stop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUF(name, stop, opc, type) \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUXF(name, stop, opc, type) \
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
#define GEN_STXF(name, stop, opc2, opc3, type) \
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_STFS(name, stop, op, type) \
GEN_STF(name, stop, op | 0x20, type) \
GEN_STUF(name, stop, op | 0x21, type) \
GEN_STUXF(name, stop, op | 0x01, type) \
GEN_STXF(name, stop, 0x17, op | 0x00, type)

GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
GEN_HANDLER_E(stfdp, 0x3D, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(stfdpx, 0x1F, 0x17, 0x1C, 0x00200001, PPC_NONE, PPC2_ISA205),
#define GEN_CRLOGIC(name, tcg_op, opc) \
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
#undef GEN_MAC_HANDLER
#define GEN_MAC_HANDLER(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
#define GEN_VR_LDX(name, opc2, opc3) \
GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
#define GEN_VR_STX(name, opc2, opc3) \
GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
#define GEN_VR_LVE(name, opc2, opc3) \
GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
#define GEN_VR_STVE(name, opc2, opc3) \
GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
GEN_VR_LDX(lvx, 0x07, 0x03),
GEN_VR_LDX(lvxl, 0x07, 0x0B),
GEN_VR_LVE(bx, 0x07, 0x00),
GEN_VR_LVE(hx, 0x07, 0x01),
GEN_VR_LVE(wx, 0x07, 0x02),
GEN_VR_STX(svx, 0x07, 0x07),
GEN_VR_STX(svxl, 0x07, 0x0F),
GEN_VR_STVE(bx, 0x07, 0x04),
GEN_VR_STVE(hx, 0x07, 0x05),
GEN_VR_STVE(wx, 0x07, 0x06),
#undef GEN_VX_LOGICAL
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)

#undef GEN_VX_LOGICAL_207
#define GEN_VX_LOGICAL_207(name, tcg_op, opc2, opc3) \
GEN_HANDLER_E(name, 0x04, opc2, opc3, 0x00000000, PPC_NONE, PPC2_ALTIVEC_207)

GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
GEN_VX_LOGICAL_207(veqv, tcg_gen_eqv_i64, 2, 26),
GEN_VX_LOGICAL_207(vnand, tcg_gen_nand_i64, 2, 22),
GEN_VX_LOGICAL_207(vorc, tcg_gen_orc_i64, 2, 21),
#define GEN_VXFORM(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)

#undef GEN_VXFORM_207
#define GEN_VXFORM_207(name, opc2, opc3) \
GEN_HANDLER_E(name, 0x04, opc2, opc3, 0x00000000, PPC_NONE, PPC2_ALTIVEC_207)

#undef GEN_VXFORM_DUAL
#define GEN_VXFORM_DUAL(name0, name1, opc2, opc3, type0, type1) \
GEN_HANDLER_E(name0##_##name1, 0x4, opc2, opc3, 0x00000000, type0, type1)

#undef GEN_VXRFORM_DUAL
#define GEN_VXRFORM_DUAL(name0, name1, opc2, opc3, tp0, tp1) \
GEN_HANDLER_E(name0##_##name1, 0x4, opc2, opc3, 0x00000000, tp0, tp1), \
GEN_HANDLER_E(name0##_##name1, 0x4, opc2, (opc3 | 0x10), 0x00000000, tp0, tp1),

GEN_VXFORM(vaddubm, 0, 0),
GEN_VXFORM(vadduhm, 0, 1),
GEN_VXFORM(vadduwm, 0, 2),
GEN_VXFORM_207(vaddudm, 0, 3),
GEN_VXFORM_DUAL(vsububm, bcdadd, 0, 16, PPC_ALTIVEC, PPC_NONE),
GEN_VXFORM_DUAL(vsubuhm, bcdsub, 0, 17, PPC_ALTIVEC, PPC_NONE),
GEN_VXFORM(vsubuwm, 0, 18),
GEN_VXFORM_207(vsubudm, 0, 19),
GEN_VXFORM(vmaxub, 1, 0),
GEN_VXFORM(vmaxuh, 1, 1),
GEN_VXFORM(vmaxuw, 1, 2),
GEN_VXFORM_207(vmaxud, 1, 3),
GEN_VXFORM(vmaxsb, 1, 4),
GEN_VXFORM(vmaxsh, 1, 5),
GEN_VXFORM(vmaxsw, 1, 6),
GEN_VXFORM_207(vmaxsd, 1, 7),
GEN_VXFORM(vminub, 1, 8),
GEN_VXFORM(vminuh, 1, 9),
GEN_VXFORM(vminuw, 1, 10),
GEN_VXFORM_207(vminud, 1, 11),
GEN_VXFORM(vminsb, 1, 12),
GEN_VXFORM(vminsh, 1, 13),
GEN_VXFORM(vminsw, 1, 14),
GEN_VXFORM_207(vminsd, 1, 15),
GEN_VXFORM(vavgub, 1, 16),
GEN_VXFORM(vavguh, 1, 17),
GEN_VXFORM(vavguw, 1, 18),
GEN_VXFORM(vavgsb, 1, 20),
GEN_VXFORM(vavgsh, 1, 21),
GEN_VXFORM(vavgsw, 1, 22),
GEN_VXFORM(vmrghb, 6, 0),
GEN_VXFORM(vmrghh, 6, 1),
GEN_VXFORM(vmrghw, 6, 2),
GEN_VXFORM(vmrglb, 6, 4),
GEN_VXFORM(vmrglh, 6, 5),
GEN_VXFORM(vmrglw, 6, 6),
GEN_VXFORM_207(vmrgew, 6, 30),
GEN_VXFORM_207(vmrgow, 6, 26),
GEN_VXFORM(vmuloub, 4, 0),
GEN_VXFORM(vmulouh, 4, 1),
GEN_VXFORM_DUAL(vmulouw, vmuluwm, 4, 2, PPC_ALTIVEC, PPC_NONE),
GEN_VXFORM(vmulosb, 4, 4),
GEN_VXFORM(vmulosh, 4, 5),
GEN_VXFORM_207(vmulosw, 4, 6),
GEN_VXFORM(vmuleub, 4, 8),
GEN_VXFORM(vmuleuh, 4, 9),
GEN_VXFORM_207(vmuleuw, 4, 10),
GEN_VXFORM(vmulesb, 4, 12),
GEN_VXFORM(vmulesh, 4, 13),
GEN_VXFORM_207(vmulesw, 4, 14),
GEN_VXFORM(vslb, 2, 4),
GEN_VXFORM(vslh, 2, 5),
GEN_VXFORM(vslw, 2, 6),
GEN_VXFORM_207(vsld, 2, 23),
GEN_VXFORM(vsrb, 2, 8),
GEN_VXFORM(vsrh, 2, 9),
GEN_VXFORM(vsrw, 2, 10),
GEN_VXFORM_207(vsrd, 2, 27),
GEN_VXFORM(vsrab, 2, 12),
GEN_VXFORM(vsrah, 2, 13),
GEN_VXFORM(vsraw, 2, 14),
GEN_VXFORM_207(vsrad, 2, 15),
GEN_VXFORM(vslo, 6, 16),
GEN_VXFORM(vsro, 6, 17),
GEN_VXFORM(vaddcuw, 0, 6),
GEN_VXFORM(vsubcuw, 0, 22),
GEN_VXFORM(vaddubs, 0, 8),
GEN_VXFORM(vadduhs, 0, 9),
GEN_VXFORM(vadduws, 0, 10),
GEN_VXFORM(vaddsbs, 0, 12),
GEN_VXFORM(vaddshs, 0, 13),
GEN_VXFORM(vaddsws, 0, 14),
GEN_VXFORM_DUAL(vsububs, bcdadd, 0, 24, PPC_ALTIVEC, PPC_NONE),
GEN_VXFORM_DUAL(vsubuhs, bcdsub, 0, 25, PPC_ALTIVEC, PPC_NONE),
GEN_VXFORM(vsubuws, 0, 26),
GEN_VXFORM(vsubsbs, 0, 28),
GEN_VXFORM(vsubshs, 0, 29),
GEN_VXFORM(vsubsws, 0, 30),
GEN_VXFORM_207(vadduqm, 0, 4),
GEN_VXFORM_207(vaddcuq, 0, 5),
GEN_VXFORM_DUAL(vaddeuqm, vaddecuq, 30, 0xFF, PPC_NONE, PPC2_ALTIVEC_207),
GEN_VXFORM_207(vsubuqm, 0, 20),
GEN_VXFORM_207(vsubcuq, 0, 21),
GEN_VXFORM_DUAL(vsubeuqm, vsubecuq, 31, 0xFF, PPC_NONE, PPC2_ALTIVEC_207),
GEN_VXFORM(vrlb, 2, 0),
GEN_VXFORM(vrlh, 2, 1),
GEN_VXFORM(vrlw, 2, 2),
GEN_VXFORM_207(vrld, 2, 3),
GEN_VXFORM(vsl, 2, 7),
GEN_VXFORM(vsr, 2, 11),
GEN_VXFORM(vpkuhum, 7, 0),
GEN_VXFORM(vpkuwum, 7, 1),
GEN_VXFORM_207(vpkudum, 7, 17),
GEN_VXFORM(vpkuhus, 7, 2),
GEN_VXFORM(vpkuwus, 7, 3),
GEN_VXFORM_207(vpkudus, 7, 19),
GEN_VXFORM(vpkshus, 7, 4),
GEN_VXFORM(vpkswus, 7, 5),
GEN_VXFORM_207(vpksdus, 7, 21),
GEN_VXFORM(vpkshss, 7, 6),
GEN_VXFORM(vpkswss, 7, 7),
GEN_VXFORM_207(vpksdss, 7, 23),
GEN_VXFORM(vpkpx, 7, 12),
GEN_VXFORM(vsum4ubs, 4, 24),
GEN_VXFORM(vsum4sbs, 4, 28),
GEN_VXFORM(vsum4shs, 4, 25),
GEN_VXFORM(vsum2sws, 4, 26),
GEN_VXFORM(vsumsws, 4, 30),
GEN_VXFORM(vaddfp, 5, 0),
GEN_VXFORM(vsubfp, 5, 1),
GEN_VXFORM(vmaxfp, 5, 16),
GEN_VXFORM(vminfp, 5, 17),
#undef GEN_VXRFORM1
#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
#define GEN_VXRFORM(name, opc2, opc3) \
GEN_VXRFORM1(name, name, #name, opc2, opc3) \
GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
GEN_VXRFORM(vcmpequb, 3, 0)
GEN_VXRFORM(vcmpequh, 3, 1)
GEN_VXRFORM(vcmpequw, 3, 2)
GEN_VXRFORM(vcmpgtsb, 3, 12)
GEN_VXRFORM(vcmpgtsh, 3, 13)
GEN_VXRFORM(vcmpgtsw, 3, 14)
GEN_VXRFORM(vcmpgtub, 3, 8)
GEN_VXRFORM(vcmpgtuh, 3, 9)
GEN_VXRFORM(vcmpgtuw, 3, 10)
GEN_VXRFORM_DUAL(vcmpeqfp, vcmpequd, 3, 3, PPC_ALTIVEC, PPC_NONE)
GEN_VXRFORM(vcmpgefp, 3, 7)
GEN_VXRFORM_DUAL(vcmpgtfp, vcmpgtud, 3, 11, PPC_ALTIVEC, PPC_NONE)
GEN_VXRFORM_DUAL(vcmpbfp, vcmpgtsd, 3, 15, PPC_ALTIVEC, PPC_NONE)
#undef GEN_VXFORM_SIMM
#define GEN_VXFORM_SIMM(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
GEN_VXFORM_SIMM(vspltisb, 6, 12),
GEN_VXFORM_SIMM(vspltish, 6, 13),
GEN_VXFORM_SIMM(vspltisw, 6, 14),
#undef GEN_VXFORM_NOA
#define GEN_VXFORM_NOA(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
GEN_VXFORM_NOA(vupkhsb, 7, 8),
GEN_VXFORM_NOA(vupkhsh, 7, 9),
GEN_VXFORM_207(vupkhsw, 7, 25),
GEN_VXFORM_NOA(vupklsb, 7, 10),
GEN_VXFORM_NOA(vupklsh, 7, 11),
GEN_VXFORM_207(vupklsw, 7, 27),
GEN_VXFORM_NOA(vupkhpx, 7, 13),
GEN_VXFORM_NOA(vupklpx, 7, 15),
GEN_VXFORM_NOA(vrefp, 5, 4),
GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
GEN_VXFORM_NOA(vexptefp, 5, 6),
GEN_VXFORM_NOA(vlogefp, 5, 7),
GEN_VXFORM_NOA(vrfim, 5, 11),
GEN_VXFORM_NOA(vrfin, 5, 8),
GEN_VXFORM_NOA(vrfip, 5, 10),
GEN_VXFORM_NOA(vrfiz, 5, 9),
#undef GEN_VXFORM_UIMM
#define GEN_VXFORM_UIMM(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
GEN_VXFORM_UIMM(vspltb, 6, 8),
GEN_VXFORM_UIMM(vsplth, 6, 9),
GEN_VXFORM_UIMM(vspltw, 6, 10),
GEN_VXFORM_UIMM(vcfux, 5, 12),
GEN_VXFORM_UIMM(vcfsx, 5, 13),
GEN_VXFORM_UIMM(vctuxs, 5, 14),
GEN_VXFORM_UIMM(vctsxs, 5, 15),
#undef GEN_VAFORM_PAIRED
#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
GEN_VAFORM_PAIRED(vsel, vperm, 21),
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
GEN_VXFORM_DUAL(vclzb, vpopcntb, 1, 28, PPC_NONE, PPC2_ALTIVEC_207),
GEN_VXFORM_DUAL(vclzh, vpopcnth, 1, 29, PPC_NONE, PPC2_ALTIVEC_207),
GEN_VXFORM_DUAL(vclzw, vpopcntw, 1, 30, PPC_NONE, PPC2_ALTIVEC_207),
GEN_VXFORM_DUAL(vclzd, vpopcntd, 1, 31, PPC_NONE, PPC2_ALTIVEC_207),

GEN_VXFORM_207(vbpermq, 6, 21),
GEN_VXFORM_207(vgbbd, 6, 20),
GEN_VXFORM_207(vpmsumb, 4, 16),
GEN_VXFORM_207(vpmsumh, 4, 17),
GEN_VXFORM_207(vpmsumw, 4, 18),
GEN_VXFORM_207(vpmsumd, 4, 19),

GEN_VXFORM_207(vsbox, 4, 23),

GEN_VXFORM_DUAL(vcipher, vcipherlast, 4, 20, PPC_NONE, PPC2_ALTIVEC_207),
GEN_VXFORM_DUAL(vncipher, vncipherlast, 4, 21, PPC_NONE, PPC2_ALTIVEC_207),

GEN_VXFORM_207(vshasigmaw, 1, 26),
GEN_VXFORM_207(vshasigmad, 1, 27),

GEN_VXFORM_DUAL(vsldoi, vpermxor, 22, 0xFF, PPC_ALTIVEC, PPC_NONE),
GEN_HANDLER_E(lxsdx, 0x1F, 0x0C, 0x12, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(lxsiwax, 0x1F, 0x0C, 0x02, 0, PPC_NONE, PPC2_VSX207),
GEN_HANDLER_E(lxsiwzx, 0x1F, 0x0C, 0x00, 0, PPC_NONE, PPC2_VSX207),
GEN_HANDLER_E(lxsspx, 0x1F, 0x0C, 0x10, 0, PPC_NONE, PPC2_VSX207),
GEN_HANDLER_E(lxvd2x, 0x1F, 0x0C, 0x1A, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(lxvdsx, 0x1F, 0x0C, 0x0A, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(lxvw4x, 0x1F, 0x0C, 0x18, 0, PPC_NONE, PPC2_VSX),

GEN_HANDLER_E(stxsdx, 0x1F, 0xC, 0x16, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(stxsiwx, 0x1F, 0xC, 0x04, 0, PPC_NONE, PPC2_VSX207),
GEN_HANDLER_E(stxsspx, 0x1F, 0xC, 0x14, 0, PPC_NONE, PPC2_VSX207),
GEN_HANDLER_E(stxvd2x, 0x1F, 0xC, 0x1E, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(stxvw4x, 0x1F, 0xC, 0x1C, 0, PPC_NONE, PPC2_VSX),

GEN_HANDLER_E(mfvsrwz, 0x1F, 0x13, 0x03, 0x0000F800, PPC_NONE, PPC2_VSX207),
GEN_HANDLER_E(mtvsrwa, 0x1F, 0x13, 0x06, 0x0000F800, PPC_NONE, PPC2_VSX207),
GEN_HANDLER_E(mtvsrwz, 0x1F, 0x13, 0x07, 0x0000F800, PPC_NONE, PPC2_VSX207),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(mfvsrd, 0x1F, 0x13, 0x01, 0x0000F800, PPC_NONE, PPC2_VSX207),
GEN_HANDLER_E(mtvsrd, 0x1F, 0x13, 0x05, 0x0000F800, PPC_NONE, PPC2_VSX207),
#endif
#define GEN_XX2FORM(name, opc2, opc3, fl2) \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0, opc3, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 1, opc3, 0, PPC_NONE, fl2)

#define GEN_XX3FORM(name, opc2, opc3, fl2) \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0, opc3, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 1, opc3, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 2, opc3, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 3, opc3, 0, PPC_NONE, fl2)

#undef GEN_XX2IFORM
#define GEN_XX2IFORM(name, opc2, opc3, fl2) \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0, opc3, 1, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 1, opc3, 1, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 2, opc3, 1, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 3, opc3, 1, PPC_NONE, fl2)

#undef GEN_XX3_RC_FORM
#define GEN_XX3_RC_FORM(name, opc2, opc3, fl2) \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x00, opc3 | 0x00, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x01, opc3 | 0x00, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x02, opc3 | 0x00, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x03, opc3 | 0x00, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x00, opc3 | 0x10, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x01, opc3 | 0x10, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x02, opc3 | 0x10, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0x03, opc3 | 0x10, 0, PPC_NONE, fl2)

#undef GEN_XX3FORM_DM
#define GEN_XX3FORM_DM(name, opc2, opc3) \
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x0C, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x0C, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x0C, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x0C, 0, PPC_NONE, PPC2_VSX)
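/*
 * The XX2/XX3 macros above intentionally register the same handler several
 * times: the extra register-number bits of the VSX operands (and, for
 * GEN_XX3FORM_DM, the additional two-bit field folded into opc3 as
 * 0x00/0x04/0x08/0x0C) fall inside the opc2/opc3 values used for decoding,
 * so every combination of those bits has to resolve to the same table entry.
 */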
GEN_XX2FORM(xsabsdp, 0x12, 0x15, PPC2_VSX),
GEN_XX2FORM(xsnabsdp, 0x12, 0x16, PPC2_VSX),
GEN_XX2FORM(xsnegdp, 0x12, 0x17, PPC2_VSX),
GEN_XX3FORM(xscpsgndp, 0x00, 0x16, PPC2_VSX),

GEN_XX2FORM(xvabsdp, 0x12, 0x1D, PPC2_VSX),
GEN_XX2FORM(xvnabsdp, 0x12, 0x1E, PPC2_VSX),
GEN_XX2FORM(xvnegdp, 0x12, 0x1F, PPC2_VSX),
GEN_XX3FORM(xvcpsgndp, 0x00, 0x1E, PPC2_VSX),
GEN_XX2FORM(xvabssp, 0x12, 0x19, PPC2_VSX),
GEN_XX2FORM(xvnabssp, 0x12, 0x1A, PPC2_VSX),
GEN_XX2FORM(xvnegsp, 0x12, 0x1B, PPC2_VSX),
GEN_XX3FORM(xvcpsgnsp, 0x00, 0x1A, PPC2_VSX),

GEN_XX3FORM(xsadddp, 0x00, 0x04, PPC2_VSX),
GEN_XX3FORM(xssubdp, 0x00, 0x05, PPC2_VSX),
GEN_XX3FORM(xsmuldp, 0x00, 0x06, PPC2_VSX),
GEN_XX3FORM(xsdivdp, 0x00, 0x07, PPC2_VSX),
GEN_XX2FORM(xsredp, 0x14, 0x05, PPC2_VSX),
GEN_XX2FORM(xssqrtdp, 0x16, 0x04, PPC2_VSX),
GEN_XX2FORM(xsrsqrtedp, 0x14, 0x04, PPC2_VSX),
GEN_XX3FORM(xstdivdp, 0x14, 0x07, PPC2_VSX),
GEN_XX2FORM(xstsqrtdp, 0x14, 0x06, PPC2_VSX),
GEN_XX3FORM(xsmaddadp, 0x04, 0x04, PPC2_VSX),
GEN_XX3FORM(xsmaddmdp, 0x04, 0x05, PPC2_VSX),
GEN_XX3FORM(xsmsubadp, 0x04, 0x06, PPC2_VSX),
GEN_XX3FORM(xsmsubmdp, 0x04, 0x07, PPC2_VSX),
GEN_XX3FORM(xsnmaddadp, 0x04, 0x14, PPC2_VSX),
GEN_XX3FORM(xsnmaddmdp, 0x04, 0x15, PPC2_VSX),
GEN_XX3FORM(xsnmsubadp, 0x04, 0x16, PPC2_VSX),
GEN_XX3FORM(xsnmsubmdp, 0x04, 0x17, PPC2_VSX),
GEN_XX2IFORM(xscmpodp, 0x0C, 0x05, PPC2_VSX),
GEN_XX2IFORM(xscmpudp, 0x0C, 0x04, PPC2_VSX),
GEN_XX3FORM(xsmaxdp, 0x00, 0x14, PPC2_VSX),
GEN_XX3FORM(xsmindp, 0x00, 0x15, PPC2_VSX),
GEN_XX2FORM(xscvdpsp, 0x12, 0x10, PPC2_VSX),
GEN_XX2FORM(xscvdpspn, 0x16, 0x10, PPC2_VSX207),
GEN_XX2FORM(xscvspdp, 0x12, 0x14, PPC2_VSX),
GEN_XX2FORM(xscvspdpn, 0x16, 0x14, PPC2_VSX207),
GEN_XX2FORM(xscvdpsxds, 0x10, 0x15, PPC2_VSX),
GEN_XX2FORM(xscvdpsxws, 0x10, 0x05, PPC2_VSX),
GEN_XX2FORM(xscvdpuxds, 0x10, 0x14, PPC2_VSX),
GEN_XX2FORM(xscvdpuxws, 0x10, 0x04, PPC2_VSX),
GEN_XX2FORM(xscvsxddp, 0x10, 0x17, PPC2_VSX),
GEN_XX2FORM(xscvuxddp, 0x10, 0x16, PPC2_VSX),
GEN_XX2FORM(xsrdpi, 0x12, 0x04, PPC2_VSX),
GEN_XX2FORM(xsrdpic, 0x16, 0x06, PPC2_VSX),
GEN_XX2FORM(xsrdpim, 0x12, 0x07, PPC2_VSX),
GEN_XX2FORM(xsrdpip, 0x12, 0x06, PPC2_VSX),
GEN_XX2FORM(xsrdpiz, 0x12, 0x05, PPC2_VSX),

GEN_XX3FORM(xsaddsp, 0x00, 0x00, PPC2_VSX207),
GEN_XX3FORM(xssubsp, 0x00, 0x01, PPC2_VSX207),
GEN_XX3FORM(xsmulsp, 0x00, 0x02, PPC2_VSX207),
GEN_XX3FORM(xsdivsp, 0x00, 0x03, PPC2_VSX207),
GEN_XX2FORM(xsresp, 0x14, 0x01, PPC2_VSX207),
GEN_XX2FORM(xsrsp, 0x12, 0x11, PPC2_VSX207),
GEN_XX2FORM(xssqrtsp, 0x16, 0x00, PPC2_VSX207),
GEN_XX2FORM(xsrsqrtesp, 0x14, 0x00, PPC2_VSX207),
GEN_XX3FORM(xsmaddasp, 0x04, 0x00, PPC2_VSX207),
GEN_XX3FORM(xsmaddmsp, 0x04, 0x01, PPC2_VSX207),
GEN_XX3FORM(xsmsubasp, 0x04, 0x02, PPC2_VSX207),
GEN_XX3FORM(xsmsubmsp, 0x04, 0x03, PPC2_VSX207),
GEN_XX3FORM(xsnmaddasp, 0x04, 0x10, PPC2_VSX207),
GEN_XX3FORM(xsnmaddmsp, 0x04, 0x11, PPC2_VSX207),
GEN_XX3FORM(xsnmsubasp, 0x04, 0x12, PPC2_VSX207),
GEN_XX3FORM(xsnmsubmsp, 0x04, 0x13, PPC2_VSX207),
GEN_XX2FORM(xscvsxdsp, 0x10, 0x13, PPC2_VSX207),
GEN_XX2FORM(xscvuxdsp, 0x10, 0x12, PPC2_VSX207),

GEN_XX3FORM(xvadddp, 0x00, 0x0C, PPC2_VSX),
GEN_XX3FORM(xvsubdp, 0x00, 0x0D, PPC2_VSX),
GEN_XX3FORM(xvmuldp, 0x00, 0x0E, PPC2_VSX),
GEN_XX3FORM(xvdivdp, 0x00, 0x0F, PPC2_VSX),
GEN_XX2FORM(xvredp, 0x14, 0x0D, PPC2_VSX),
GEN_XX2FORM(xvsqrtdp, 0x16, 0x0C, PPC2_VSX),
GEN_XX2FORM(xvrsqrtedp, 0x14, 0x0C, PPC2_VSX),
GEN_XX3FORM(xvtdivdp, 0x14, 0x0F, PPC2_VSX),
GEN_XX2FORM(xvtsqrtdp, 0x14, 0x0E, PPC2_VSX),
GEN_XX3FORM(xvmaddadp, 0x04, 0x0C, PPC2_VSX),
GEN_XX3FORM(xvmaddmdp, 0x04, 0x0D, PPC2_VSX),
GEN_XX3FORM(xvmsubadp, 0x04, 0x0E, PPC2_VSX),
GEN_XX3FORM(xvmsubmdp, 0x04, 0x0F, PPC2_VSX),
GEN_XX3FORM(xvnmaddadp, 0x04, 0x1C, PPC2_VSX),
GEN_XX3FORM(xvnmaddmdp, 0x04, 0x1D, PPC2_VSX),
GEN_XX3FORM(xvnmsubadp, 0x04, 0x1E, PPC2_VSX),
GEN_XX3FORM(xvnmsubmdp, 0x04, 0x1F, PPC2_VSX),
GEN_XX3FORM(xvmaxdp, 0x00, 0x1C, PPC2_VSX),
GEN_XX3FORM(xvmindp, 0x00, 0x1D, PPC2_VSX),
GEN_XX3_RC_FORM(xvcmpeqdp, 0x0C, 0x0C, PPC2_VSX),
GEN_XX3_RC_FORM(xvcmpgtdp, 0x0C, 0x0D, PPC2_VSX),
GEN_XX3_RC_FORM(xvcmpgedp, 0x0C, 0x0E, PPC2_VSX),
GEN_XX2FORM(xvcvdpsp, 0x12, 0x18, PPC2_VSX),
GEN_XX2FORM(xvcvdpsxds, 0x10, 0x1D, PPC2_VSX),
GEN_XX2FORM(xvcvdpsxws, 0x10, 0x0D, PPC2_VSX),
GEN_XX2FORM(xvcvdpuxds, 0x10, 0x1C, PPC2_VSX),
GEN_XX2FORM(xvcvdpuxws, 0x10, 0x0C, PPC2_VSX),
GEN_XX2FORM(xvcvsxddp, 0x10, 0x1F, PPC2_VSX),
GEN_XX2FORM(xvcvuxddp, 0x10, 0x1E, PPC2_VSX),
GEN_XX2FORM(xvcvsxwdp, 0x10, 0x0F, PPC2_VSX),
GEN_XX2FORM(xvcvuxwdp, 0x10, 0x0E, PPC2_VSX),
GEN_XX2FORM(xvrdpi, 0x12, 0x0C, PPC2_VSX),
GEN_XX2FORM(xvrdpic, 0x16, 0x0E, PPC2_VSX),
GEN_XX2FORM(xvrdpim, 0x12, 0x0F, PPC2_VSX),
GEN_XX2FORM(xvrdpip, 0x12, 0x0E, PPC2_VSX),
GEN_XX2FORM(xvrdpiz, 0x12, 0x0D, PPC2_VSX),

GEN_XX3FORM(xvaddsp, 0x00, 0x08, PPC2_VSX),
GEN_XX3FORM(xvsubsp, 0x00, 0x09, PPC2_VSX),
GEN_XX3FORM(xvmulsp, 0x00, 0x0A, PPC2_VSX),
GEN_XX3FORM(xvdivsp, 0x00, 0x0B, PPC2_VSX),
GEN_XX2FORM(xvresp, 0x14, 0x09, PPC2_VSX),
GEN_XX2FORM(xvsqrtsp, 0x16, 0x08, PPC2_VSX),
GEN_XX2FORM(xvrsqrtesp, 0x14, 0x08, PPC2_VSX),
GEN_XX3FORM(xvtdivsp, 0x14, 0x0B, PPC2_VSX),
GEN_XX2FORM(xvtsqrtsp, 0x14, 0x0A, PPC2_VSX),
GEN_XX3FORM(xvmaddasp, 0x04, 0x08, PPC2_VSX),
GEN_XX3FORM(xvmaddmsp, 0x04, 0x09, PPC2_VSX),
GEN_XX3FORM(xvmsubasp, 0x04, 0x0A, PPC2_VSX),
GEN_XX3FORM(xvmsubmsp, 0x04, 0x0B, PPC2_VSX),
GEN_XX3FORM(xvnmaddasp, 0x04, 0x18, PPC2_VSX),
GEN_XX3FORM(xvnmaddmsp, 0x04, 0x19, PPC2_VSX),
GEN_XX3FORM(xvnmsubasp, 0x04, 0x1A, PPC2_VSX),
GEN_XX3FORM(xvnmsubmsp, 0x04, 0x1B, PPC2_VSX),
GEN_XX3FORM(xvmaxsp, 0x00, 0x18, PPC2_VSX),
GEN_XX3FORM(xvminsp, 0x00, 0x19, PPC2_VSX),
GEN_XX3_RC_FORM(xvcmpeqsp, 0x0C, 0x08, PPC2_VSX),
GEN_XX3_RC_FORM(xvcmpgtsp, 0x0C, 0x09, PPC2_VSX),
GEN_XX3_RC_FORM(xvcmpgesp, 0x0C, 0x0A, PPC2_VSX),
GEN_XX2FORM(xvcvspdp, 0x12, 0x1C, PPC2_VSX),
GEN_XX2FORM(xvcvspsxds, 0x10, 0x19, PPC2_VSX),
GEN_XX2FORM(xvcvspsxws, 0x10, 0x09, PPC2_VSX),
GEN_XX2FORM(xvcvspuxds, 0x10, 0x18, PPC2_VSX),
GEN_XX2FORM(xvcvspuxws, 0x10, 0x08, PPC2_VSX),
GEN_XX2FORM(xvcvsxdsp, 0x10, 0x1B, PPC2_VSX),
GEN_XX2FORM(xvcvuxdsp, 0x10, 0x1A, PPC2_VSX),
GEN_XX2FORM(xvcvsxwsp, 0x10, 0x0B, PPC2_VSX),
GEN_XX2FORM(xvcvuxwsp, 0x10, 0x0A, PPC2_VSX),
GEN_XX2FORM(xvrspi, 0x12, 0x08, PPC2_VSX),
GEN_XX2FORM(xvrspic, 0x16, 0x0A, PPC2_VSX),
GEN_XX2FORM(xvrspim, 0x12, 0x0B, PPC2_VSX),
GEN_XX2FORM(xvrspip, 0x12, 0x0A, PPC2_VSX),
GEN_XX2FORM(xvrspiz, 0x12, 0x09, PPC2_VSX),
#define VSX_LOGICAL(name, opc2, opc3, fl2) \
GEN_XX3FORM(name, opc2, opc3, fl2)

VSX_LOGICAL(xxland, 0x8, 0x10, PPC2_VSX),
VSX_LOGICAL(xxlandc, 0x8, 0x11, PPC2_VSX),
VSX_LOGICAL(xxlor, 0x8, 0x12, PPC2_VSX),
VSX_LOGICAL(xxlxor, 0x8, 0x13, PPC2_VSX),
VSX_LOGICAL(xxlnor, 0x8, 0x14, PPC2_VSX),
VSX_LOGICAL(xxleqv, 0x8, 0x17, PPC2_VSX207),
VSX_LOGICAL(xxlnand, 0x8, 0x16, PPC2_VSX207),
VSX_LOGICAL(xxlorc, 0x8, 0x15, PPC2_VSX207),
GEN_XX3FORM(xxmrghw, 0x08, 0x02, PPC2_VSX),
GEN_XX3FORM(xxmrglw, 0x08, 0x06, PPC2_VSX),
GEN_XX2FORM(xxspltw, 0x08, 0x0A, PPC2_VSX),
GEN_XX3FORM_DM(xxsldwi, 0x08, 0x00),
#define GEN_XXSEL_ROW(opc3) \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x18, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x19, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1A, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1B, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1C, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1D, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1E, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1F, opc3, 0, PPC_NONE, PPC2_VSX),

GEN_XXSEL_ROW(0x00)
GEN_XXSEL_ROW(0x01)
GEN_XXSEL_ROW(0x02)
GEN_XXSEL_ROW(0x03)
GEN_XXSEL_ROW(0x04)
GEN_XXSEL_ROW(0x05)
GEN_XXSEL_ROW(0x06)
GEN_XXSEL_ROW(0x07)
GEN_XXSEL_ROW(0x08)
GEN_XXSEL_ROW(0x09)
GEN_XXSEL_ROW(0x0A)
GEN_XXSEL_ROW(0x0B)
GEN_XXSEL_ROW(0x0C)
GEN_XXSEL_ROW(0x0D)
GEN_XXSEL_ROW(0x0E)
GEN_XXSEL_ROW(0x0F)
GEN_XXSEL_ROW(0x10)
GEN_XXSEL_ROW(0x11)
GEN_XXSEL_ROW(0x12)
GEN_XXSEL_ROW(0x13)
GEN_XXSEL_ROW(0x14)
GEN_XXSEL_ROW(0x15)
GEN_XXSEL_ROW(0x16)
GEN_XXSEL_ROW(0x17)
GEN_XXSEL_ROW(0x18)
GEN_XXSEL_ROW(0x19)
GEN_XXSEL_ROW(0x1A)
GEN_XXSEL_ROW(0x1B)
GEN_XXSEL_ROW(0x1C)
GEN_XXSEL_ROW(0x1D)
GEN_XXSEL_ROW(0x1E)
GEN_XXSEL_ROW(0x1F)

GEN_XX3FORM_DM(xxpermdi, 0x08, 0x01),
11329 #undef GEN_DFP_T_A_B_Rc
11330 #undef GEN_DFP_BF_A_B
11331 #undef GEN_DFP_BF_A_DCM
11332 #undef GEN_DFP_T_B_U32_U32_Rc
11333 #undef GEN_DFP_T_A_B_I32_Rc
11334 #undef GEN_DFP_T_B_Rc
11335 #undef GEN_DFP_T_FPR_I32_Rc
11337 #define _GEN_DFP_LONG(name, op1, op2, mask) \
11338 GEN_HANDLER_E(name, 0x3B, op1, op2, mask, PPC_NONE, PPC2_DFP)
11340 #define _GEN_DFP_LONGx2(name, op1, op2, mask) \
11341 GEN_HANDLER_E(name, 0x3B, op1, 0x00 | op2, mask, PPC_NONE, PPC2_DFP), \
11342 GEN_HANDLER_E(name, 0x3B, op1, 0x10 | op2, mask, PPC_NONE, PPC2_DFP)
11344 #define _GEN_DFP_LONGx4(name, op1, op2, mask) \
11345 GEN_HANDLER_E(name, 0x3B, op1, 0x00 | op2, mask, PPC_NONE, PPC2_DFP), \
11346 GEN_HANDLER_E(name, 0x3B, op1, 0x08 | op2, mask, PPC_NONE, PPC2_DFP), \
11347 GEN_HANDLER_E(name, 0x3B, op1, 0x10 | op2, mask, PPC_NONE, PPC2_DFP), \
11348 GEN_HANDLER_E(name, 0x3B, op1, 0x18 | op2, mask, PPC_NONE, PPC2_DFP)
11350 #define _GEN_DFP_QUAD(name, op1, op2, mask) \
11351 GEN_HANDLER_E(name, 0x3F, op1, op2, mask, PPC_NONE, PPC2_DFP)
11353 #define _GEN_DFP_QUADx2(name, op1, op2, mask) \
11354 GEN_HANDLER_E(name, 0x3F, op1, 0x00 | op2, mask, PPC_NONE, PPC2_DFP), \
11355 GEN_HANDLER_E(name, 0x3F, op1, 0x10 | op2, mask, PPC_NONE, PPC2_DFP)
11357 #define _GEN_DFP_QUADx4(name, op1, op2, mask) \
11358 GEN_HANDLER_E(name, 0x3F, op1, 0x00 | op2, mask, PPC_NONE, PPC2_DFP), \
11359 GEN_HANDLER_E(name, 0x3F, op1, 0x08 | op2, mask, PPC_NONE, PPC2_DFP), \
11360 GEN_HANDLER_E(name, 0x3F, op1, 0x10 | op2, mask, PPC_NONE, PPC2_DFP), \
11361 GEN_HANDLER_E(name, 0x3F, op1, 0x18 | op2, mask, PPC_NONE, PPC2_DFP)
#define GEN_DFP_T_A_B_Rc(name, op1, op2) \
_GEN_DFP_LONG(name, op1, op2, 0x00000000)

#define GEN_DFP_Tp_Ap_Bp_Rc(name, op1, op2) \
_GEN_DFP_QUAD(name, op1, op2, 0x00210800)

#define GEN_DFP_Tp_A_Bp_Rc(name, op1, op2) \
_GEN_DFP_QUAD(name, op1, op2, 0x00200800)

#define GEN_DFP_T_B_Rc(name, op1, op2) \
_GEN_DFP_LONG(name, op1, op2, 0x001F0000)

#define GEN_DFP_Tp_Bp_Rc(name, op1, op2) \
_GEN_DFP_QUAD(name, op1, op2, 0x003F0800)

#define GEN_DFP_Tp_B_Rc(name, op1, op2) \
_GEN_DFP_QUAD(name, op1, op2, 0x003F0000)

#define GEN_DFP_T_Bp_Rc(name, op1, op2) \
_GEN_DFP_QUAD(name, op1, op2, 0x001F0800)

#define GEN_DFP_BF_A_B(name, op1, op2) \
_GEN_DFP_LONG(name, op1, op2, 0x00000001)

#define GEN_DFP_BF_Ap_Bp(name, op1, op2) \
_GEN_DFP_QUAD(name, op1, op2, 0x00610801)

#define GEN_DFP_BF_A_Bp(name, op1, op2) \
_GEN_DFP_QUAD(name, op1, op2, 0x00600801)

#define GEN_DFP_BF_A_DCM(name, op1, op2) \
_GEN_DFP_LONGx2(name, op1, op2, 0x00600001)

#define GEN_DFP_BF_Ap_DCM(name, op1, op2) \
_GEN_DFP_QUADx2(name, op1, op2, 0x00610001)

#define GEN_DFP_T_A_B_RMC_Rc(name, op1, op2) \
_GEN_DFP_LONGx4(name, op1, op2, 0x00000000)

#define GEN_DFP_Tp_Ap_Bp_RMC_Rc(name, op1, op2) \
_GEN_DFP_QUADx4(name, op1, op2, 0x02010800)

#define GEN_DFP_Tp_A_Bp_RMC_Rc(name, op1, op2) \
_GEN_DFP_QUADx4(name, op1, op2, 0x02000800)

#define GEN_DFP_TE_T_B_RMC_Rc(name, op1, op2) \
_GEN_DFP_LONGx4(name, op1, op2, 0x00000000)

#define GEN_DFP_TE_Tp_Bp_RMC_Rc(name, op1, op2) \
_GEN_DFP_QUADx4(name, op1, op2, 0x00200800)

#define GEN_DFP_R_T_B_RMC_Rc(name, op1, op2) \
_GEN_DFP_LONGx4(name, op1, op2, 0x001E0000)

#define GEN_DFP_R_Tp_Bp_RMC_Rc(name, op1, op2) \
_GEN_DFP_QUADx4(name, op1, op2, 0x003E0800)

#define GEN_DFP_SP_T_B_Rc(name, op1, op2) \
_GEN_DFP_LONG(name, op1, op2, 0x00070000)

#define GEN_DFP_SP_Tp_Bp_Rc(name, op1, op2) \
_GEN_DFP_QUAD(name, op1, op2, 0x00270800)

#define GEN_DFP_S_T_B_Rc(name, op1, op2) \
_GEN_DFP_LONG(name, op1, op2, 0x000F0000)

#define GEN_DFP_S_Tp_Bp_Rc(name, op1, op2) \
_GEN_DFP_QUAD(name, op1, op2, 0x002F0800)

#define GEN_DFP_T_A_SH_Rc(name, op1, op2) \
_GEN_DFP_LONGx2(name, op1, op2, 0x00000000)

#define GEN_DFP_Tp_Ap_SH_Rc(name, op1, op2) \
_GEN_DFP_QUADx2(name, op1, op2, 0x00210000)
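/*
 * The wrappers above differ only in the mask they hand down to GEN_HANDLER_E,
 * i.e. the encoding bits that must be zero for the given operand layout.  As
 * a worked example, the first entry of the table below,
 *     GEN_DFP_T_A_B_Rc(dadd, 0x02, 0x00)
 * expands through _GEN_DFP_LONG to
 *     GEN_HANDLER_E(dadd, 0x3B, 0x02, 0x00, 0x00000000, PPC_NONE, PPC2_DFP)
 * a single handler in the opcode 59 (0x3B) DFP space with no reserved bits,
 * while the x2/x4 variants register the same handler at several opc3 values,
 * presumably to cover encodings whose immediate fields (DCM, RMC, SH) spill
 * into the opc3 bits.
 */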
GEN_DFP_T_A_B_Rc(dadd, 0x02, 0x00),
GEN_DFP_Tp_Ap_Bp_Rc(daddq, 0x02, 0x00),
GEN_DFP_T_A_B_Rc(dsub, 0x02, 0x10),
GEN_DFP_Tp_Ap_Bp_Rc(dsubq, 0x02, 0x10),
GEN_DFP_T_A_B_Rc(dmul, 0x02, 0x01),
GEN_DFP_Tp_Ap_Bp_Rc(dmulq, 0x02, 0x01),
GEN_DFP_T_A_B_Rc(ddiv, 0x02, 0x11),
GEN_DFP_Tp_Ap_Bp_Rc(ddivq, 0x02, 0x11),
GEN_DFP_BF_A_B(dcmpu, 0x02, 0x14),
GEN_DFP_BF_Ap_Bp(dcmpuq, 0x02, 0x14),
GEN_DFP_BF_A_B(dcmpo, 0x02, 0x04),
GEN_DFP_BF_Ap_Bp(dcmpoq, 0x02, 0x04),
GEN_DFP_BF_A_DCM(dtstdc, 0x02, 0x06),
GEN_DFP_BF_Ap_DCM(dtstdcq, 0x02, 0x06),
GEN_DFP_BF_A_DCM(dtstdg, 0x02, 0x07),
GEN_DFP_BF_Ap_DCM(dtstdgq, 0x02, 0x07),
GEN_DFP_BF_A_B(dtstex, 0x02, 0x05),
GEN_DFP_BF_Ap_Bp(dtstexq, 0x02, 0x05),
GEN_DFP_BF_A_B(dtstsf, 0x02, 0x15),
GEN_DFP_BF_A_Bp(dtstsfq, 0x02, 0x15),
GEN_DFP_TE_T_B_RMC_Rc(dquai, 0x03, 0x02),
GEN_DFP_TE_Tp_Bp_RMC_Rc(dquaiq, 0x03, 0x02),
GEN_DFP_T_A_B_RMC_Rc(dqua, 0x03, 0x00),
GEN_DFP_Tp_Ap_Bp_RMC_Rc(dquaq, 0x03, 0x00),
GEN_DFP_T_A_B_RMC_Rc(drrnd, 0x03, 0x01),
GEN_DFP_Tp_A_Bp_RMC_Rc(drrndq, 0x03, 0x01),
GEN_DFP_R_T_B_RMC_Rc(drintx, 0x03, 0x03),
GEN_DFP_R_Tp_Bp_RMC_Rc(drintxq, 0x03, 0x03),
GEN_DFP_R_T_B_RMC_Rc(drintn, 0x03, 0x07),
GEN_DFP_R_Tp_Bp_RMC_Rc(drintnq, 0x03, 0x07),
GEN_DFP_T_B_Rc(dctdp, 0x02, 0x08),
GEN_DFP_Tp_B_Rc(dctqpq, 0x02, 0x08),
GEN_DFP_T_B_Rc(drsp, 0x02, 0x18),
GEN_DFP_Tp_Bp_Rc(drdpq, 0x02, 0x18),
GEN_DFP_T_B_Rc(dcffix, 0x02, 0x19),
GEN_DFP_Tp_B_Rc(dcffixq, 0x02, 0x19),
GEN_DFP_T_B_Rc(dctfix, 0x02, 0x09),
GEN_DFP_T_Bp_Rc(dctfixq, 0x02, 0x09),
GEN_DFP_SP_T_B_Rc(ddedpd, 0x02, 0x0a),
GEN_DFP_SP_Tp_Bp_Rc(ddedpdq, 0x02, 0x0a),
GEN_DFP_S_T_B_Rc(denbcd, 0x02, 0x1a),
GEN_DFP_S_Tp_Bp_Rc(denbcdq, 0x02, 0x1a),
GEN_DFP_T_B_Rc(dxex, 0x02, 0x0b),
GEN_DFP_T_Bp_Rc(dxexq, 0x02, 0x0b),
GEN_DFP_T_A_B_Rc(diex, 0x02, 0x1b),
GEN_DFP_Tp_A_Bp_Rc(diexq, 0x02, 0x1b),
GEN_DFP_T_A_SH_Rc(dscli, 0x02, 0x02),
GEN_DFP_Tp_Ap_SH_Rc(dscliq, 0x02, 0x02),
GEN_DFP_T_A_SH_Rc(dscri, 0x02, 0x03),
GEN_DFP_Tp_Ap_SH_Rc(dscriq, 0x02, 0x03),
#define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
GEN_OPCODE_DUAL(name0##_##name1, 0x04, opc2, opc3, inval0, inval1, type, PPC_NONE)
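/*
 * Each GEN_SPE line registers a pair of instructions that share one slot in
 * the opcode 0x04 (SPE) table.  The first entry below, for instance,
 *     GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
 * expands to
 *     GEN_OPCODE_DUAL(evaddw_speundef, 0x04, 0x00, 0x08,
 *                     0x00000000, 0xFFFFFFFF, PPC_SPE, PPC_NONE),
 * where inval0/inval1 become the two invalid-bit masks that the decode loop
 * in gen_intermediate_code selects between based on Rc(opcode); an all-ones
 * mask marks the variant that is undefined (speundef).
 */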
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE),
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE),
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE),
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE),
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE),
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
#undef GEN_SPEOP_LDST
#define GEN_SPEOP_LDST(name, opc2, sh) \
GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
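/*
 * Only name and opc2 reach GEN_HANDLER above; the sh argument is unused by
 * this table macro.  Judging by the values used below (3 for double words,
 * 2 for words, 1 for half words), it presumably mirrors the log2 access size
 * used by the matching gen_* load/store helpers defined earlier in the file.
 */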
GEN_SPEOP_LDST(evldd, 0x00, 3),
GEN_SPEOP_LDST(evldw, 0x01, 3),
GEN_SPEOP_LDST(evldh, 0x02, 3),
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
GEN_SPEOP_LDST(evlwhe, 0x08, 2),
GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),

GEN_SPEOP_LDST(evstdd, 0x10, 3),
GEN_SPEOP_LDST(evstdw, 0x11, 3),
GEN_SPEOP_LDST(evstdh, 0x12, 3),
GEN_SPEOP_LDST(evstwhe, 0x18, 2),
GEN_SPEOP_LDST(evstwho, 0x1A, 2),
GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
               PPC_NONE, PPC2_TM),
#include "helper_regs.h"
#include "translate_init.c"

/*****************************************************************************/
/* Misc PowerPC helpers */
void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                        int flags)
{
/* registers printed per output line */
#define RGPL  4
#define RFPL  4

    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = &cpu->env;
    int i;
    cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
                TARGET_FMT_lx " XER " TARGET_FMT_lx " CPU#%d\n",
                env->nip, env->lr, env->ctr, cpu_read_xer(env),
                cs->cpu_index);
    cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
                TARGET_FMT_lx " iidx %d didx %d\n",
                env->msr, env->spr[SPR_HID0],
                env->hflags, env->immu_idx, env->dmmu_idx);
#if !defined(NO_TIMER_DUMP)
    cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
#if !defined(CONFIG_USER_ONLY)
                " DECR %08" PRIu32
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
    for (i = 0; i < 32; i++) {
        if ((i & (RGPL - 1)) == 0)
            cpu_fprintf(f, "GPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
        if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, " [");
    for (i = 0; i < 8; i++) {
        char a = '-';

        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
                env->reserve_addr);
    for (i = 0; i < 32; i++) {
        if ((i & (RFPL - 1)) == 0)
            cpu_fprintf(f, "FPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
        if ((i & (RFPL - 1)) == (RFPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
#if !defined(CONFIG_USER_ONLY)
    cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
                " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
                env->spr[SPR_SRR0], env->spr[SPR_SRR1],
                env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);

    cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
                " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
                env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);

    cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
                " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
                env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);
#if defined(TARGET_PPC64)
    if (env->excp_model == POWERPC_EXCP_POWER7 ||
        env->excp_model == POWERPC_EXCP_POWER8) {
        cpu_fprintf(f, "HSRR0 " TARGET_FMT_lx " HSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_HSRR0], env->spr[SPR_HSRR1]);
    }
#endif
    if (env->excp_model == POWERPC_EXCP_BOOKE) {
        cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
                    " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
                    env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);

        cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
                    " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
                    env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);

        cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
                    " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
                    env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);

        cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
                    " EPR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
                    env->spr[SPR_BOOKE_EPR]);

        cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
                    " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
                    env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
                    env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);

        /*
         * IVORs are left out as they are large and do not change often --
         * they can be read with "p $ivor0", "p $ivor1", etc.
         */
    }
#if defined(TARGET_PPC64)
    if (env->flags & POWERPC_FLAG_CFAR) {
        cpu_fprintf(f, " CFAR " TARGET_FMT_lx "\n", env->cfar);
    }
#endif
    switch (env->mmu_model) {
    case POWERPC_MMU_32B:
    case POWERPC_MMU_601:
    case POWERPC_MMU_SOFT_6xx:
    case POWERPC_MMU_SOFT_74xx:
#if defined(TARGET_PPC64)
    case POWERPC_MMU_64B:
    case POWERPC_MMU_2_03:
    case POWERPC_MMU_2_06:
    case POWERPC_MMU_2_06a:
    case POWERPC_MMU_2_07:
    case POWERPC_MMU_2_07a:
#endif
        cpu_fprintf(f, " SDR1 " TARGET_FMT_lx " DAR " TARGET_FMT_lx
                    " DSISR " TARGET_FMT_lx "\n", env->spr[SPR_SDR1],
                    env->spr[SPR_DAR], env->spr[SPR_DSISR]);
        break;
    case POWERPC_MMU_BOOKE206:
        cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
                    " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
                    env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);

        cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
                    " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
                    env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);

        cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
                    " TLB1CFG " TARGET_FMT_lx "\n",
                    env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
                    env->spr[SPR_BOOKE_TLB1CFG]);
        break;
    default:
        break;
    }
#endif

#undef RGPL
#undef RFPL
}
void ppc_cpu_dump_statistics(CPUState *cs, FILE*f,
                             fprintf_function cpu_fprintf, int flags)
{
#if defined(DO_PPC_STATISTICS)
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    opc_handler_t **t1, **t2, **t3, *handler;
    int op1, op2, op3;

    t1 = cpu->env.opcodes;
    for (op1 = 0; op1 < 64; op1++) {
        handler = t1[op1];
        if (is_indirect_opcode(handler)) {
            t2 = ind_table(handler);
            for (op2 = 0; op2 < 32; op2++) {
                handler = t2[op2];
                if (is_indirect_opcode(handler)) {
                    t3 = ind_table(handler);
                    for (op3 = 0; op3 < 32; op3++) {
                        handler = t3[op3];
                        if (handler->count == 0)
                            continue;
                        cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
                                    "%016" PRIx64 " %" PRId64 "\n",
                                    op1, op2, op3, op1, (op3 << 5) | op2,
                                    handler->oname,
                                    handler->count, handler->count);
                    }
                } else {
                    if (handler->count == 0)
                        continue;
                    cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0)
                continue;
            cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count, handler->count);
        }
    }
#endif
}
/*****************************************************************************/
void gen_intermediate_code(CPUPPCState *env, struct TranslationBlock *tb)
{
    PowerPCCPU *cpu = ppc_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    DisasContext ctx, *ctxp = &ctx;
    opc_handler_t **table, *handler;
    target_ulong pc_start;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    ctx.nip = pc_start;
    ctx.tb = tb;
    ctx.exception = POWERPC_EXCP_NONE;
    ctx.spr_cb = env->spr_cb;
    ctx.pr = msr_pr;
    ctx.mem_idx = env->dmmu_idx;
    ctx.dr = msr_dr;
#if !defined(CONFIG_USER_ONLY)
    ctx.hv = msr_hv || !env->has_hv_mode;
#endif
    ctx.insns_flags = env->insns_flags;
    ctx.insns_flags2 = env->insns_flags2;
    ctx.access_type = -1;
    ctx.le_mode = !!(env->hflags & (1 << MSR_LE));
    ctx.default_tcg_memop_mask = ctx.le_mode ? MO_LE : MO_BE;
#if defined(TARGET_PPC64)
    ctx.sf_mode = msr_is_64bit(env, env->msr);
    ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    if (env->mmu_model == POWERPC_MMU_32B ||
        env->mmu_model == POWERPC_MMU_601 ||
        (env->mmu_model & POWERPC_MMU_64B))
        ctx.lazy_tlb_flush = true;

    ctx.fpu_enabled = !!msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx.spe_enabled = !!msr_spe;
    else
        ctx.spe_enabled = false;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx.altivec_enabled = !!msr_vr;
    else
        ctx.altivec_enabled = false;
    if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
        ctx.vsx_enabled = !!msr_vsx;
    } else {
        ctx.vsx_enabled = false;
    }
#if defined(TARGET_PPC64)
    if ((env->flags & POWERPC_FLAG_TM) && msr_tm) {
        ctx.tm_enabled = !!msr_tm;
    } else {
        ctx.tm_enabled = false;
    }
#endif
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx.singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx.singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx.singlestep_enabled |= CPU_BRANCH_STEP;
    if (unlikely(cs->singlestep_enabled)) {
        ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
    }
#if defined (DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    gen_tb_start(tb);
    tcg_clear_temp_count();
    /* Set env in case of segfault during code fetch */
    while (ctx.exception == POWERPC_EXCP_NONE && !tcg_op_buf_full()) {
        tcg_gen_insn_start(ctx.nip);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.nip, BP_ANY))) {
            gen_debug_exception(ctxp);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be properly
               cleared -- thus we increment the PC here so that the logic
               setting tb->size below does the right thing. */
            ctx.nip += 4;
            break;
        }
        LOG_DISAS("----------------\n");
        LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
                  ctx.nip, ctx.mem_idx, (int)msr_ir);
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        if (unlikely(need_byteswap(&ctx))) {
            ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
        } else {
            ctx.opcode = cpu_ldl_code(env, ctx.nip);
        }
        LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
                  ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
                  opc3(ctx.opcode), opc4(ctx.opcode),
                  ctx.le_mode ? "little" : "big");
        ctx.nip += 4;
        table = env->opcodes;
        handler = table[opc1(ctx.opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc2(ctx.opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc3(ctx.opcode)];
                if (is_indirect_opcode(handler)) {
                    table = ind_table(handler);
                    handler = table[opc4(ctx.opcode)];
                }
            }
        }
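        /*
         * handler now points at the leaf entry for this opcode: the walk
         * above indexes the per-CPU table by opc1 and then, for indirect
         * entries, descends by opc2, opc3 and finally opc4.  Unimplemented
         * encodings end up at gen_invalid, which is what the check below
         * looks for.
         */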
        /* Is opcode *REALLY* valid ? */
        if (unlikely(handler->handler == &gen_invalid)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                          "%02x - %02x - %02x - %02x (%08x) "
                          TARGET_FMT_lx " %d\n",
                          opc1(ctx.opcode), opc2(ctx.opcode),
                          opc3(ctx.opcode), opc4(ctx.opcode),
                          ctx.opcode, ctx.nip - 4, (int)msr_ir);
        } else {
            uint32_t inval;

            if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                         && Rc(ctx.opcode))) {
                inval = handler->inval2;
            } else {
                inval = handler->inval1;
            }

            if (unlikely((ctx.opcode & inval) != 0)) {
                qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                              "%02x - %02x - %02x - %02x (%08x) "
                              TARGET_FMT_lx "\n", ctx.opcode & inval,
                              opc1(ctx.opcode), opc2(ctx.opcode),
                              opc3(ctx.opcode), opc4(ctx.opcode),
                              ctx.opcode, ctx.nip - 4);
                gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
                break;
            }
        }
        (*(handler->handler))(&ctx);
#if defined(DO_PPC_STATISTICS)
        handler->count++;
#endif
        /* Check trace mode exceptions */
        if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
                     (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
                     ctx.exception != POWERPC_SYSCALL &&
                     ctx.exception != POWERPC_EXCP_TRAP &&
                     ctx.exception != POWERPC_EXCP_BRANCH)) {
            gen_exception(ctxp, POWERPC_EXCP_TRACE);
        } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
                            (cs->singlestep_enabled) ||
                            singlestep ||
                            num_insns >= max_insns)) {
            /* if we reach a page boundary or are single stepping, stop
             * generation
             */
            break;
        }
        if (tcg_check_temp_count()) {
            fprintf(stderr, "Opcode %02x %02x %02x %02x (%08x) leaked "
                    "temporaries\n", opc1(ctx.opcode), opc2(ctx.opcode),
                    opc3(ctx.opcode), opc4(ctx.opcode), ctx.opcode);
            exit(1);
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (ctx.exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(&ctx, 0, ctx.nip);
    } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(cs->singlestep_enabled)) {
            gen_debug_exception(ctxp);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(0);
    }
    gen_tb_end(tb, num_insns);

    tb->size = ctx.nip - pc_start;
    tb->icount = num_insns;
#if defined(DEBUG_DISAS)
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        int flags;
        flags = env->bfd_mach;
        flags |= ctx.le_mode << 16;
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.nip - pc_start, flags);
        qemu_log("\n");
    }
#endif
}
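/*
 * Restoring state after a fault only needs the guest nip: data[0] holds the
 * single value recorded per instruction by tcg_gen_insn_start(ctx.nip) in
 * gen_intermediate_code above.
 */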
void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}