/*
 *  PowerPC emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *  Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "disas/disas.h"
#include "qemu/host-utils.h"

#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2
#define GDBSTUB_SINGLE_STEP 0x4

/* Include definitions for instructions classes and implementations flags */
//#define PPC_DEBUG_DISAS
//#define DO_PPC_STATISTICS

#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif

/*****************************************************************************/
/* Code translation helpers                                                  */
/* global register indexes */
static TCGv_ptr cpu_env;
static char cpu_reg_names[10*3 + 22*4 /* GPR */
#if !defined(TARGET_PPC64)
    + 10*4 + 22*5 /* SPE GPRh */
#endif
    + 10*4 + 22*5 /* FPR */
    + 2*(10*6 + 22*7) /* AVRh, AVRl */
    + 10*5 + 22*6 /* VSR */
    + 8*5 /* CRF */];
static TCGv cpu_gpr[32];
#if !defined(TARGET_PPC64)
static TCGv cpu_gprh[32];
#endif
static TCGv_i64 cpu_fpr[32];
static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
static TCGv_i64 cpu_vsr[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca;
static TCGv cpu_reserve;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"
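/*
 * ppc_translate_init() below registers each architected register (GPRs,
 * FPRs, AltiVec halves, VSX doublewords, CR fields and the special-purpose
 * globals above) as a TCG global backed by its CPUPPCState field, so
 * generated code can reference the guest state by name.  The register name
 * strings are packed into cpu_reg_names, whose size is derived from the
 * per-register name lengths in the declaration above.
 */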
void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;
    static int done_init = 0;

    if (done_init) {
        return;
    }

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }
    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
#if !defined(TARGET_PPC64)
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
#endif

        snprintf(p, cpu_reg_names_size, "fp%d", i);
        cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUPPCState, fpr[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;

        snprintf(p, cpu_reg_names_size, "avr%dH", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#else
        cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;

        snprintf(p, cpu_reg_names_size, "avr%dL", i);
#ifdef HOST_WORDS_BIGENDIAN
        cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUPPCState, avr[i].u64[1]), p);
#else
        cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUPPCState, avr[i].u64[0]), p);
#endif
        p += (i < 10) ? 6 : 7;
        cpu_reg_names_size -= (i < 10) ? 6 : 7;

        snprintf(p, cpu_reg_names_size, "vsr%d", i);
        cpu_vsr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUPPCState, vsr[i]), p);
        p += (i < 10) ? 5 : 6;
        cpu_reg_names_size -= (i < 10) ? 5 : 6;
    }
    cpu_nip = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(TCG_AREG0,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUPPCState, ca), "CA");

    cpu_reserve = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");

    cpu_fpscr = tcg_global_mem_new(TCG_AREG0,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");

    done_init = 1;
}
/* internal defines */
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong nip;
    uint32_t opcode;
    uint32_t exception;
    /* Routine used to access memory */
    int mem_idx;
    int access_type;
    /* Translation flags */
    int le_mode;
#if defined(TARGET_PPC64)
    int sf_mode;
    int has_cfar;
#endif
    int fpu_enabled;
    int altivec_enabled;
    int vsx_enabled;
    int spe_enabled;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint64_t insns_flags;
    uint64_t insns_flags2;
} DisasContext;

/* True when active word size < size of target_long.  */
#ifdef TARGET_PPC64
# define NARROW_MODE(C)  (!(C)->sf_mode)
#else
# define NARROW_MODE(C)  0
#endif
struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
#if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
    const char *oname;
#endif
#if defined(DO_PPC_STATISTICS)
    uint64_t count;
#endif
};
static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus(cpu_env);
}

static inline void gen_compute_fprf(TCGv_i64 arg, int set_fprf, int set_rc)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    if (set_fprf != 0) {
        /* This case might be optimized later */
        tcg_gen_movi_i32(t0, 1);
        gen_helper_compute_fprf(t0, cpu_env, arg, t0);
        if (unlikely(set_rc)) {
            tcg_gen_mov_i32(cpu_crf[1], t0);
        }
        gen_helper_float_check_status(cpu_env);
    } else if (unlikely(set_rc)) {
        /* We always need to compute fpcc */
        tcg_gen_movi_i32(t0, 0);
        gen_helper_compute_fprf(t0, cpu_env, arg, t0);
        tcg_gen_mov_i32(cpu_crf[1], t0);
    }

    tcg_temp_free_i32(t0);
}

static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static inline void gen_exception_err(DisasContext *ctx, uint32_t excp,
                                     uint32_t error)
{
    TCGv_i32 t0, t1;

    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->exception = (excp);
}

static inline void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    if (ctx->exception == POWERPC_EXCP_NONE) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->exception = (excp);
}

static inline void gen_debug_exception(DisasContext *ctx)
{
    TCGv_i32 t0;

    if ((ctx->exception != POWERPC_EXCP_BRANCH) &&
        (ctx->exception != POWERPC_EXCP_SYNC)) {
        gen_update_nip(ctx, ctx->nip);
    }
    t0 = tcg_const_i32(EXCP_DEBUG);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_INVAL | error);
}

/* Stop translation */
static inline void gen_stop_exception(DisasContext *ctx)
{
    gen_update_nip(ctx, ctx->nip);
    ctx->exception = POWERPC_EXCP_STOP;
}

/* No need to update nip here, as execution flow will change */
static inline void gen_sync_exception(DisasContext *ctx)
{
    ctx->exception = POWERPC_EXCP_SYNC;
}
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

typedef struct opcode_t {
    unsigned char opc1, opc2, opc3;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[5];
#else
    unsigned char pad[1];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/*****************************************************************************/
/***                           Instruction decoding                        ***/
#define EXTRACT_HELPER(name, shift, nb) \
static inline uint32_t name(uint32_t opcode) \
{ \
    return (opcode >> (shift)) & ((1 << (nb)) - 1); \
}

#define EXTRACT_SHELPER(name, shift, nb) \
static inline int32_t name(uint32_t opcode) \
{ \
    return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
}

#define EXTRACT_HELPER_SPLIT(name, shift1, nb1, shift2, nb2) \
static inline uint32_t name(uint32_t opcode) \
{ \
    return (((opcode >> (shift1)) & ((1 << (nb1)) - 1)) << nb2) | \
           ((opcode >> (shift2)) & ((1 << (nb2)) - 1)); \
}
/* Opcode part 1 */
EXTRACT_HELPER(opc1, 26, 6);
/* Opcode part 2 */
EXTRACT_HELPER(opc2, 1, 5);
/* Opcode part 3 */
EXTRACT_HELPER(opc3, 6, 5);
/* Update Cr0 flags */
EXTRACT_HELPER(Rc, 0, 1);
/* Destination */
EXTRACT_HELPER(rD, 21, 5);
/* Source */
EXTRACT_HELPER(rS, 21, 5);
/* First operand */
EXTRACT_HELPER(rA, 16, 5);
/* Second operand */
EXTRACT_HELPER(rB, 11, 5);
/* Third operand */
EXTRACT_HELPER(rC, 6, 5);
/***                               Get CRn                                 ***/
EXTRACT_HELPER(crfD, 23, 3);
EXTRACT_HELPER(crfS, 18, 3);
EXTRACT_HELPER(crbD, 21, 5);
EXTRACT_HELPER(crbA, 16, 5);
EXTRACT_HELPER(crbB, 11, 5);
/* SPR / TBL */
EXTRACT_HELPER(_SPR, 11, 10);
static inline uint32_t SPR(uint32_t opcode)
{
    uint32_t sprn = _SPR(opcode);

    return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
}
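/*
 * In the mfspr/mtspr encoding the 10-bit SPR number is stored with its two
 * 5-bit halves swapped, so SPR() swaps them back to recover the architected
 * SPR number from the raw _SPR field.
 */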
/***                             Get constants                             ***/
EXTRACT_HELPER(IMM, 12, 8);
/* 16 bits signed immediate value */
EXTRACT_SHELPER(SIMM, 0, 16);
/* 16 bits unsigned immediate value */
EXTRACT_HELPER(UIMM, 0, 16);
/* 5 bits signed immediate value */
EXTRACT_HELPER(SIMM5, 16, 5);
/* 5 bits unsigned immediate value */
EXTRACT_HELPER(UIMM5, 16, 5);
EXTRACT_HELPER(NB, 11, 5);
EXTRACT_HELPER(SH, 11, 5);
/* Vector shift count */
EXTRACT_HELPER(VSH, 6, 4);
EXTRACT_HELPER(MB, 6, 5);
EXTRACT_HELPER(ME, 1, 5);
EXTRACT_HELPER(TO, 21, 5);
EXTRACT_HELPER(CRM, 12, 8);
EXTRACT_HELPER(SR, 16, 4);

EXTRACT_HELPER(FPBF, 23, 3);
EXTRACT_HELPER(FPIMM, 12, 4);
EXTRACT_HELPER(FPL, 25, 1);
EXTRACT_HELPER(FPFLM, 17, 8);
EXTRACT_HELPER(FPW, 16, 1);
/***                          Jump target decoding                         ***/
EXTRACT_SHELPER(d, 0, 16);
/* Immediate address */
static inline target_ulong LI(uint32_t opcode)
{
    return (opcode >> 0) & 0x03FFFFFC;
}

static inline uint32_t BD(uint32_t opcode)
{
    return (opcode >> 0) & 0xFFFC;
}

EXTRACT_HELPER(BO, 21, 5);
EXTRACT_HELPER(BI, 16, 5);
/* Absolute/relative address */
EXTRACT_HELPER(AA, 1, 1);
EXTRACT_HELPER(LK, 0, 1);
/* Create a mask between <start> and <end> bits */
static inline target_ulong MASK(uint32_t start, uint32_t end)
{
    target_ulong ret;

#if defined(TARGET_PPC64)
    if (likely(start == 0)) {
        ret = UINT64_MAX << (63 - end);
    } else if (likely(end == 63)) {
        ret = UINT64_MAX >> start;
    }
#else
    if (likely(start == 0)) {
        ret = UINT32_MAX << (31 - end);
    } else if (likely(end == 31)) {
        ret = UINT32_MAX >> start;
    }
#endif
    else {
        ret = (((target_ulong)(-1ULL)) >> (start)) ^
              (((target_ulong)(-1ULL) >> (end)) >> 1);
        if (unlikely(start > end)) {
            return ~ret;
        }
    }

    return ret;
}
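/*
 * MASK() uses the PowerPC (IBM) bit numbering, where bit 0 is the most
 * significant bit: MASK(0, 0) selects only the MSB, and start > end yields
 * the wrap-around masks needed by the rotate-and-mask instructions below.
 */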
EXTRACT_HELPER_SPLIT(xT, 0, 1, 21, 5);
EXTRACT_HELPER_SPLIT(xS, 0, 1, 21, 5);
EXTRACT_HELPER_SPLIT(xA, 2, 1, 16, 5);
EXTRACT_HELPER_SPLIT(xB, 1, 1, 11, 5);
EXTRACT_HELPER_SPLIT(xC, 3, 1, 6, 5);
EXTRACT_HELPER(DM, 8, 2);
EXTRACT_HELPER(UIM, 16, 2);
EXTRACT_HELPER(SHW, 8, 2);
/*****************************************************************************/
/* PowerPC instructions table                                                */

#if defined(DO_PPC_STATISTICS)
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, .opc2 = op2, .opc3 = op3, .pad = { 0, }, \
    .handler = { \
        .inval1 = invl, .type = _typ, .type2 = _typ2, \
        .handler = &gen_##name, \
        .oname = stringify(name), \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
{ \
    .opc1 = op1, .opc2 = op2, .opc3 = op3, .pad = { 0, }, \
    .handler = { \
        .inval1 = invl1, .inval2 = invl2, .type = _typ, .type2 = _typ2, \
        .handler = &gen_##name, \
        .oname = stringify(name), \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, .opc2 = op2, .opc3 = op3, .pad = { 0, }, \
    .handler = { \
        .inval1 = invl, .type = _typ, .type2 = _typ2, \
        .handler = &gen_##name, \
        .oname = onam, \
    }, \
    .oname = onam, \
}
#else
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, .opc2 = op2, .opc3 = op3, .pad = { 0, }, \
    .handler = { \
        .inval1 = invl, .type = _typ, .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
{ \
    .opc1 = op1, .opc2 = op2, .opc3 = op3, .pad = { 0, }, \
    .handler = { \
        .inval1 = invl1, .inval2 = invl2, .type = _typ, .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, .opc2 = op2, .opc3 = op3, .pad = { 0, }, \
    .handler = { \
        .inval1 = invl, .type = _typ, .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = onam, \
}
#endif
/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};
/***                           Integer comparison                          ***/

static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);

    tcg_gen_setcond_tl((s ? TCG_COND_LT : TCG_COND_LTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_LT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl((s ? TCG_COND_GT : TCG_COND_GTU), t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_GT);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_gen_setcond_tl(TCG_COND_EQ, t0, arg0, arg1);
    tcg_gen_trunc_tl_i32(t1, t0);
    tcg_gen_shli_i32(t1, t1, CRF_EQ);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t1);

    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}
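/*
 * Each 4-bit CR field holds LT, GT, EQ and SO bits.  gen_op_cmp() first
 * copies the sticky XER summary-overflow flag into the field's SO position,
 * then ORs in exactly one of LT/GT/EQ according to the comparison, using
 * signed or unsigned TCG conditions depending on 's'.
 */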
static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}
static void gen_cmp(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    }
}

static void gen_cmpi(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    }
}

static void gen_cmpl(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
    } else {
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    }
}

static void gen_cmpli(DisasContext *ctx)
{
    if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
    } else {
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    }
}
/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    int l1, l2;
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask;
    TCGv_i32 t0;

    l1 = gen_new_label();
    l2 = gen_new_label();

    mask = 1 << (3 - (bi & 0x03));
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
    if (rA(ctx->opcode) == 0)
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
    else
        tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
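/*
 * isel: rD receives (rA|0) when the CR bit selected by the instruction is
 * set and rB otherwise; "(rA|0)" means that rA == r0 reads as the constant
 * zero, which is why the rA(ctx->opcode) == 0 case above uses a
 * move-immediate of 0 instead of reading the register.
 */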
/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}
776 static inline void gen_op_arith_compute_ov(DisasContext
*ctx
, TCGv arg0
,
777 TCGv arg1
, TCGv arg2
, int sub
)
779 TCGv t0
= tcg_temp_new();
781 tcg_gen_xor_tl(cpu_ov
, arg0
, arg2
);
782 tcg_gen_xor_tl(t0
, arg1
, arg2
);
784 tcg_gen_and_tl(cpu_ov
, cpu_ov
, t0
);
786 tcg_gen_andc_tl(cpu_ov
, cpu_ov
, t0
);
789 if (NARROW_MODE(ctx
)) {
790 tcg_gen_ext32s_tl(cpu_ov
, cpu_ov
);
792 tcg_gen_shri_tl(cpu_ov
, cpu_ov
, TARGET_LONG_BITS
- 1);
793 tcg_gen_or_tl(cpu_so
, cpu_so
, cpu_ov
);
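/*
 * Signed overflow occurs when the operands have the same sign (addition) or
 * different signs (subtraction) and the result's sign differs from arg2;
 * the XOR/AND(C) sequence above leaves exactly that condition in the sign
 * bit of cpu_ov, which is then shifted down to bit 0 and accumulated into
 * the sticky SO flag.
 */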
/* Common add function */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32.  */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, cpu_ca);
            }
            tcg_gen_xor_tl(cpu_ca, t0, t1);        /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_shri_tl(cpu_ca, cpu_ca, 32);   /* extract bit 32 */
            tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, cpu_ca, zero);
                tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, cpu_ca, arg1, zero, arg2, zero);
            }
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, cpu_ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
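/*
 * In narrow (32-bit) mode the full target_long addition above is still
 * performed, and the carry out of bit 31 is recovered as bit 32 of
 * (arg1 + arg2 [+ ca]) ^ arg1 ^ arg2, i.e. of the sum XORed with the
 * carry-less sum.  In 64-bit mode tcg_gen_add2_tl() delivers the carry-out
 * directly into cpu_ca.
 */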
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
                                add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv t0 = tcg_const_tl(const_val); \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
                     cpu_gpr[rA(ctx->opcode)], t0, \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
    tcg_temp_free(t0); \
}

/* add  add.  addo  addo. */
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
/* addc  addc.  addco  addco. */
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
/* adde  adde.  addeo  addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
/* addme  addme.  addmeo  addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
/* addze  addze.  addzeo  addzeo. */
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
/* addi */
static void gen_addi(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* li case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], simm);
    }
}

static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}

static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}

/* addis */
static void gen_addis(DisasContext *ctx)
{
    target_long simm = SIMM(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        /* lis case */
        tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
    } else {
        tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], simm << 16);
    }
}
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();
    TCGv_i32 t1 = tcg_temp_local_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
    if (sign) {
        int l3 = gen_new_label();
        tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
        tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
        gen_set_label(l3);
        tcg_gen_div_i32(t0, t0, t1);
    } else {
        tcg_gen_divu_i32(t0, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_movi_tl(cpu_ov, 0);
    }
    tcg_gen_br(l2);
    gen_set_label(l1);
    if (sign) {
        tcg_gen_sari_i32(t0, t0, 31);
    } else {
        tcg_gen_movi_i32(t0, 0);
    }
    if (compute_ov) {
        tcg_gen_movi_tl(cpu_ov, 1);
        tcg_gen_movi_tl(cpu_so, 1);
    }
    gen_set_label(l2);
    tcg_gen_extu_i32_tl(ret, t0);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}

#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign, compute_ov); \
}
/* divwu  divwu.  divwuo  divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw  divw.  divwo  divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
#if defined(TARGET_PPC64)
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
    if (sign) {
        int l3 = gen_new_label();
        tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
        tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
        gen_set_label(l3);
        tcg_gen_div_i64(ret, arg1, arg2);
    } else {
        tcg_gen_divu_i64(ret, arg1, arg2);
    }
    if (compute_ov) {
        tcg_gen_movi_tl(cpu_ov, 0);
    }
    tcg_gen_br(l2);
    gen_set_label(l1);
    if (sign) {
        tcg_gen_sari_i64(ret, arg1, 63);
    } else {
        tcg_gen_movi_i64(ret, 0);
    }
    if (compute_ov) {
        tcg_gen_movi_tl(cpu_ov, 1);
        tcg_gen_movi_tl(cpu_so, 1);
    }
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign, compute_ov); \
}
/* divdu  divdu.  divduo  divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd  divd.  divdo  divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
#endif
/* mulhw  mulhw. */
static void gen_mulhw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulhwu  mulhwu. */
static void gen_mulhwu(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mulu2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mullw  mullw. */
static void gen_mullw(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mullwo  mullwo. */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
    tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulli */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}
#if defined(TARGET_PPC64)
/* mulhd  mulhd. */
static void gen_mulhd(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulhdu  mulhdu. */
static void gen_mulhdu(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulld  mulld. */
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}

/* mulldo  mulldo. */
static void gen_mulldo(DisasContext *ctx)
{
    gen_helper_mulldo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif
/* Common subf function */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /* Caution: a non-obvious corner case of the spec is that we
               must produce the *entire* 64-bit addition, but produce the
               carry into bit 32.  */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changes w/ carry */
            tcg_temp_free(t1);
            tcg_gen_shri_tl(cpu_ca, cpu_ca, 32);    /* extract bit 32 */
            tcg_gen_andi_tl(cpu_ca, cpu_ca, 1);
        } else if (add_ca) {
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
        }
    } else if (add_ca) {
        /* Since we're ignoring carry-out, we can simplify the
           standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.  */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
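/*
 * subf is implemented above as ~arg1 + arg2 (+ ca or + 1), matching the
 * architected definition, so the PowerPC carry is the carry out of that
 * addition; for the plain subtract-with-carry case it reduces to
 * arg2 >= arg1 (unsigned), which is what the TCG_COND_GEU setcond computes.
 */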
/* Sub functions with two operands */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
                                 add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv t0 = tcg_const_tl(const_val); \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], t0, \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
    tcg_temp_free(t0); \
}
/* subf  subf.  subfo  subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc  subfc.  subfco  subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe  subfe.  subfeo  subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme  subfme.  subfmeo  subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze  subfze.  subfzeo  subfzeo. */
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/* neg neg. nego nego. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

static void gen_neg(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 0);
}

static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}
/***                            Integer logical                            ***/
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
           cpu_gpr[rB(ctx->opcode)]); \
    if (unlikely(Rc(ctx->opcode) != 0)) \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
}

#define GEN_LOGICAL1(name, tcg_op, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
    if (unlikely(Rc(ctx->opcode) != 0)) \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
}

/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);

/* andi. */
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* andis. */
static void gen_andis_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cntlzw */
static void gen_cntlzw(DisasContext *ctx)
{
    gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor. */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
/* or & or. */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else {
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (ctx->mem_idx > 0) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (ctx->mem_idx > 0) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (ctx->mem_idx > 0) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->mem_idx > 1) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            /* nop */
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor. */
static void gen_xor(DisasContext *ctx)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode))
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                       cpu_gpr[rB(ctx->opcode)]);
    else
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* ori */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        /* XXX: should handle special NOPs for POWER series */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                   uimm << 16);
}

/* xori */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* xoris */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    uimm << 16);
}
/* popcntb : PowerPC 2.03 specification */
static void gen_popcntb(DisasContext *ctx)
{
    gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

static void gen_popcntw(DisasContext *ctx)
{
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    gen_helper_popcntd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif
/* prtyw: PowerPC 2.05 specification */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}
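/*
 * prtyw folds each 32-bit word onto itself with two shift/XOR steps, so bit
 * 0 of each word ends up holding the XOR of that word's four byte
 * least-significant bits (the parity prtyw defines); the final AND with
 * 0x1_00000001 keeps just those two result bits.
 */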
#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif

#if defined(TARGET_PPC64)
/* extsw & extsw. */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);

/* cntlzd */
static void gen_cntlzd(DisasContext *ctx)
{
    gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif
/***                             Integer rotate                            ***/

/* rlwimi & rlwimi. */
static void gen_rlwimi(DisasContext *ctx)
{
    uint32_t mb, me, sh;

    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);
    sh = SH(ctx->opcode);
    if (likely(sh == 0 && mb == 0 && me == 31)) {
        tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    } else {
        target_ulong mask;
        TCGv t1;
        TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_rotli_i32(t2, t2, sh);
        tcg_gen_extu_i32_i64(t0, t2);
        tcg_temp_free_i32(t2);
#else
        tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
#endif
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
        t1 = tcg_temp_new();
        tcg_gen_andi_tl(t0, t0, mask);
        tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
        tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* rlwinm & rlwinm. */
static void gen_rlwinm(DisasContext *ctx)
{
    uint32_t mb, me, sh;

    sh = SH(ctx->opcode);
    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);

    if (likely(mb == 0 && me == (31 - sh))) {
        if (likely(sh == 0)) {
            tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)],
                              cpu_gpr[rS(ctx->opcode)]);
        } else {
            TCGv t0 = tcg_temp_new();
            tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shli_tl(t0, t0, sh);
            tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
            tcg_temp_free(t0);
        }
    } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_shri_tl(t0, t0, mb);
        tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
        tcg_temp_free(t0);
    } else {
        TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_rotli_i32(t1, t1, sh);
        tcg_gen_extu_i32_i64(t0, t1);
        tcg_temp_free_i32(t1);
#else
        tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
#endif
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
        tcg_temp_free(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* rlwnm & rlwnm. */
static void gen_rlwnm(DisasContext *ctx)
{
    uint32_t mb, me;
    TCGv t0;
#if defined(TARGET_PPC64)
    TCGv_i32 t1, t2;
#endif

    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);
    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
#if defined(TARGET_PPC64)
    t1 = tcg_temp_new_i32();
    t2 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_trunc_i64_i32(t2, t0);
    tcg_gen_rotl_i32(t1, t1, t2);
    tcg_gen_extu_i32_i64(t0, t1);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
#else
    tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
#endif
    if (unlikely(mb != 0 || me != 31)) {
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
    } else {
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    }
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#if defined(TARGET_PPC64)
#define GEN_PPC64_R2(name, opc1, opc2) \
static void glue(gen_, name##0)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 0); \
} \
 \
static void glue(gen_, name##1)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 1); \
}
#define GEN_PPC64_R4(name, opc1, opc2) \
static void glue(gen_, name##0)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 0, 0); \
} \
 \
static void glue(gen_, name##1)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 0, 1); \
} \
 \
static void glue(gen_, name##2)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 1, 0); \
} \
 \
static void glue(gen_, name##3)(DisasContext *ctx) \
{ \
    gen_##name(ctx, 1, 1); \
}

static inline void gen_rldinm(DisasContext *ctx, uint32_t mb, uint32_t me,
                              uint32_t sh)
{
    if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
        tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
        tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
    } else {
        TCGv t0 = tcg_temp_new();
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
        if (likely(mb == 0 && me == 63)) {
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
        } else {
            tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
        }
        tcg_temp_free(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* rldicl - rldicl. */
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);
/* rldicr - rldicr. */
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);
/* rldic - rldic. */
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);

static inline void gen_rldnm(DisasContext *ctx, uint32_t mb, uint32_t me)
{
    TCGv t0;

    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    if (unlikely(mb != 0 || me != 63)) {
        tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
    } else {
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    }
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* rldcl - rldcl. */
static inline void gen_rldcl(DisasContext *ctx, int mbn)
{
    uint32_t mb;

    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldnm(ctx, mb, 63);
}
GEN_PPC64_R2(rldcl, 0x1E, 0x08);
/* rldcr - rldcr. */
static inline void gen_rldcr(DisasContext *ctx, int men)
{
    uint32_t me;

    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);
/* rldimi - rldimi. */
static inline void gen_rldimi(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb, me;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    me = 63 - sh;
    if (unlikely(sh == 0 && mb == 0)) {
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    } else {
        TCGv t0, t1;
        target_ulong mask;

        t0 = tcg_temp_new();
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
        t1 = tcg_temp_new();
        mask = MASK(mb, me);
        tcg_gen_andi_tl(t0, t0, mask);
        tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
        tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
#endif
/***                             Integer shift                             ***/

/* slw & slw. */
static void gen_slw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
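/*
 * The architected shift count for slw lives in the low six bits of rB.  In
 * gen_slw() above, shifting rB left so that its bit 5 reaches the sign
 * position and then arithmetically shifting right yields an all-ones mask
 * exactly when the count is >= 32, so the ANDC forces the result to zero in
 * that case without a branch; the low five bits then drive the TCG shift.
 * The same trick, with bit 6, is used for the 64-bit shifts below.
 */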
/* sraw & sraw. */
static void gen_sraw(DisasContext *ctx)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srawi & srawi. */
static void gen_srawi(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
    } else {
        TCGv t0;
        tcg_gen_ext32s_tl(dst, src);
        tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        tcg_gen_sari_tl(dst, dst, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
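/*
 * For srawi/sradi the carry (CA) must be set only when the shifted-out bits
 * are non-zero and the source is negative, i.e. when the shift rounds a
 * negative value towards minus infinity.  ANDing the shifted-out bits with
 * the replicated sign bit and then comparing against zero implements
 * exactly that condition.
 */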
/* srw & srw. */
static void gen_srw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

#if defined(TARGET_PPC64)
/* sld & sld. */
static void gen_sld(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* srad & srad. */
static void gen_srad(DisasContext *ctx)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sradi & sradi. */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
    } else {
        TCGv t0;
        tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        tcg_gen_sari_tl(dst, src, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}

static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}

/* srd & srd. */
static void gen_srd(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif
/***                       Floating-Point arithmetic                       ***/
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                     cpu_fpr[rA(ctx->opcode)], \
                     cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
    if (isfloat) { \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                        cpu_fpr[rD(ctx->opcode)]); \
    } \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], set_fprf, \
                     Rc(ctx->opcode) != 0); \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                     cpu_fpr[rA(ctx->opcode)], \
                     cpu_fpr[rB(ctx->opcode)]); \
    if (isfloat) { \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                        cpu_fpr[rD(ctx->opcode)]); \
    } \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
                     set_fprf, Rc(ctx->opcode) != 0); \
}
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                     cpu_fpr[rA(ctx->opcode)], \
                     cpu_fpr[rC(ctx->opcode)]); \
    if (isfloat) { \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                        cpu_fpr[rD(ctx->opcode)]); \
    } \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
                     set_fprf, Rc(ctx->opcode) != 0); \
}
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                       cpu_fpr[rB(ctx->opcode)]); \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
                     set_fprf, Rc(ctx->opcode) != 0); \
}

#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from an helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_reset_fpstatus(); \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                       cpu_fpr[rB(ctx->opcode)]); \
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], \
                     set_fprf, Rc(ctx->opcode) != 0); \
}
/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);

/* frsqrtes */
static void gen_frsqrtes(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_env,
                       cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
                    cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}

/* fsel */
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* fsqrt */
static void gen_fsqrt(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
                     cpu_fpr[rB(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}

static void gen_fsqrts(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
                     cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
                    cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 1, Rc(ctx->opcode) != 0);
}
/***                     Floating-Point multiply-and-add                   ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/***                     Floating-Point round & convert                    ***/
/* fctiw */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwz */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* frsp */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
#if defined(TARGET_PPC64)
/* fcfid */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
/* fctid */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
/* fctidz */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
#endif

/* frin */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
/*** Floating-Point compare ***/

/* fcmpo */
static void gen_fcmpo(DisasContext *ctx)
{
    TCGv_i32 crf;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    gen_helper_fcmpo(cpu_env, cpu_fpr[rA(ctx->opcode)],
                     cpu_fpr[rB(ctx->opcode)], crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
}

/* fcmpu */
static void gen_fcmpu(DisasContext *ctx)
{
    TCGv_i32 crf;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    gen_helper_fcmpu(cpu_env, cpu_fpr[rA(ctx->opcode)],
                     cpu_fpr[rB(ctx->opcode)], crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
}
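/*
 * For reference (PowerPC ISA semantics, assumed rather than taken from this
 * file): the fcmpo/fcmpu helpers set the selected CR field to one of
 *   0b1000  FRA < FRB
 *   0b0100  FRA > FRB
 *   0b0010  FRA == FRB
 *   0b0001  unordered (at least one operand is a NaN)
 * and mirror the result into FPSCR[FPCC]; fcmpo additionally raises VXVC on
 * NaN operands, which is why both handlers end with float_check_status.
 */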
/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor update FPSCR */
static void gen_fabs(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_andi_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
                     ~(1ULL << 63));
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
}

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor update FPSCR */
static void gen_fmr(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
static void gen_fnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_ori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
                    1ULL << 63);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
}

/* fneg */
/* XXX: beware that fneg never checks for NaNs nor update FPSCR */
static void gen_fneg(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_xori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
                     1ULL << 63);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
}

/* fcpsgn: PowerPC 2.05 specification */
/* XXX: beware that fcpsgn never checks for NaNs nor update FPSCR */
static void gen_fcpsgn(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
                        cpu_fpr[rB(ctx->opcode)], 0, 63);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
}
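/*
 * These move forms operate purely on the 64-bit register image: fneg, for
 * example, just flips IEEE-754 bit 63, so 0x4000000000000000 (2.0) becomes
 * 0xC000000000000000 (-2.0) even if the input is a NaN, matching the XXX
 * notes above about the FPSCR not being touched.
 */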
/*** Floating-Point status & ctrl register ***/

/* mcrfs */
static void gen_mcrfs(DisasContext *ctx)
{
    TCGv tmp = tcg_temp_new();
    int bfa;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bfa = 4 * (7 - crfS(ctx->opcode));
    tcg_gen_shri_tl(tmp, cpu_fpscr, bfa);
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
    tcg_temp_free(tmp);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
    tcg_gen_andi_tl(cpu_fpscr, cpu_fpscr, ~(0xF << bfa));
}

/* mffs */
static void gen_mffs(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
    gen_compute_fprf(cpu_fpr[rD(ctx->opcode)], 0, Rc(ctx->opcode) != 0);
}
/* mtfsb0 */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from a helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}

/* mtfsb1 */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        /* NIP cannot be restored if the memory exception comes from a helper */
        gen_update_nip(ctx, ctx->nip - 4);
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}
/* mtfsf */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    gen_helper_store_fpscr(cpu_env, cpu_fpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}

/* mtfsfi */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_reset_fpstatus();
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}
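/*
 * Worked example for the field arithmetic above (assuming ISA 2.05 FPSCR
 * field numbering): for mtfsfi with W=0 and BF=7, sh = 7 - 7 = 0, so the
 * 4-bit immediate is deposited into the least-significant FPSCR nibble
 * (XE, NI and the two RN rounding-mode bits) and the mask passed to
 * gen_helper_store_fpscr selects only that nibble.
 */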
/*** Addressing modes ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}
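/*
 * Example of the D-form address computation above: for "lwz r3, 8(r4)" the
 * generated code is tcg_gen_addi_tl(EA, cpu_gpr[4], 8), while "lwz r3, 8(0)"
 * (rA == 0) simply yields EA = 8; in narrow (32-bit) mode the result is
 * zero-extended to 32 bits.
 */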
static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        }
    } else {
        tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    }
}

static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        tcg_gen_movi_tl(EA, 0);
    } else if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    } else {
        tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    }
}

static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                target_long val)
{
    tcg_gen_addi_tl(ret, arg1, val);
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(ret, ret);
    }
}
static inline void gen_check_align(DisasContext *ctx, TCGv EA, int mask)
{
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t1, t2;
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    tcg_gen_andi_tl(t0, EA, mask);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    t1 = tcg_const_i32(POWERPC_EXCP_ALIGN);
    t2 = tcg_const_i32(0);
    gen_helper_raise_exception_err(cpu_env, t1, t2);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    gen_set_label(l1);
    tcg_temp_free(t0);
}
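/*
 * Usage note: callers pass the low-bit mask of the required alignment, e.g.
 * gen_check_align(ctx, EA, 0x03) (as lwarx/stwcx. do further below) raises
 * POWERPC_EXCP_ALIGN for any EA that is not word aligned, such as 0x1002.
 */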
/*** Integer load ***/
static inline void gen_qemu_ld8u(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld8u(arg1, arg2, ctx->mem_idx);
}

static inline void gen_qemu_ld8s(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld8s(arg1, arg2, ctx->mem_idx);
}

static inline void gen_qemu_ld16u(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
    if (unlikely(ctx->le_mode)) {
        tcg_gen_bswap16_tl(arg1, arg1);
    }
}

static inline void gen_qemu_ld16s(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
        tcg_gen_qemu_ld16u(arg1, arg2, ctx->mem_idx);
        tcg_gen_bswap16_tl(arg1, arg1);
        tcg_gen_ext16s_tl(arg1, arg1);
    } else {
        tcg_gen_qemu_ld16s(arg1, arg2, ctx->mem_idx);
    }
}

static inline void gen_qemu_ld32u(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
    if (unlikely(ctx->le_mode)) {
        tcg_gen_bswap32_tl(arg1, arg1);
    }
}

static void gen_qemu_ld32u_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
{
    TCGv tmp = tcg_temp_new();
    gen_qemu_ld32u(ctx, tmp, addr);
    tcg_gen_extu_tl_i64(val, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_ld32s(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
        tcg_gen_qemu_ld32u(arg1, arg2, ctx->mem_idx);
        tcg_gen_bswap32_tl(arg1, arg1);
        tcg_gen_ext32s_tl(arg1, arg1);
    } else {
        tcg_gen_qemu_ld32s(arg1, arg2, ctx->mem_idx);
    }
}

static inline void gen_qemu_ld64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    tcg_gen_qemu_ld64(arg1, arg2, ctx->mem_idx);
    if (unlikely(ctx->le_mode)) {
        tcg_gen_bswap64_i64(arg1, arg1);
    }
}

static inline void gen_qemu_st8(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    tcg_gen_qemu_st8(arg1, arg2, ctx->mem_idx);
}

static inline void gen_qemu_st16(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext16u_tl(t0, arg1);
        tcg_gen_bswap16_tl(t0, t0);
        tcg_gen_qemu_st16(t0, arg2, ctx->mem_idx);
        tcg_temp_free(t0);
    } else {
        tcg_gen_qemu_st16(arg1, arg2, ctx->mem_idx);
    }
}

static inline void gen_qemu_st32(DisasContext *ctx, TCGv arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, arg1);
        tcg_gen_bswap32_tl(t0, t0);
        tcg_gen_qemu_st32(t0, arg2, ctx->mem_idx);
        tcg_temp_free(t0);
    } else {
        tcg_gen_qemu_st32(arg1, arg2, ctx->mem_idx);
    }
}

static void gen_qemu_st32_i64(DisasContext *ctx, TCGv_i64 val, TCGv addr)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_trunc_i64_tl(tmp, val);
    gen_qemu_st32(ctx, tmp, addr);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_st64(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    if (unlikely(ctx->le_mode)) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_bswap64_i64(t0, arg1);
        tcg_gen_qemu_st64(t0, arg2, ctx->mem_idx);
        tcg_temp_free_i64(t0);
    } else {
        tcg_gen_qemu_st64(arg1, arg2, ctx->mem_idx);
    }
}
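/*
 * Endianness note: the tcg_gen_qemu_* accessors use the guest's default
 * big-endian byte order, so the wrappers above byte-swap when the CPU runs
 * in little-endian mode.  A 16-bit load of the bytes {0x12, 0x34} therefore
 * yields 0x1234 normally and 0x3412 when ctx->le_mode is set.
 */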
#define GEN_LD(name, ldop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDU(name, ldop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(rA(ctx->opcode) == 0 || \
                 rA(ctx->opcode) == rD(ctx->opcode))) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    if (type == PPC_64B) \
        gen_addr_imm_index(ctx, EA, 0x03); \
    else \
        gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDUX(name, ldop, opc2, opc3, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(rA(ctx->opcode) == 0 || \
                 rA(ctx->opcode) == rD(ctx->opcode))) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}
#define GEN_LDX(name, ldop, opc2, opc3, type) \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE)

#define GEN_LDS(name, ldop, op, type) \
GEN_LD(name, ldop, op | 0x20, type); \
GEN_LDU(name, ldop, op | 0x21, type); \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)
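/*
 * A sketch of what one GEN_LDS instantiation produces: GEN_LDS(lwz, ld32u,
 * 0x00, PPC_INTEGER) expands, via the four macros above, into the handlers
 * gen_lwz (D-form), gen_lwzu (update form), gen_lwzux (indexed update form)
 * and gen_lwzx (indexed form), each loading through gen_qemu_ld32u into
 * cpu_gpr[rD] and, for the update forms, writing the effective address back
 * to cpu_gpr[rA].
 */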
/* lbz lbzu lbzux lbzx */
GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
/* lha lhau lhaux lhax */
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
/* lhz lhzu lhzux lhzx */
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
/* lwz lwzu lwzux lwzx */
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
#if defined(TARGET_PPC64)
/* lwaux */
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
/* lwax */
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
/* ldux */
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B);
/* ldx */
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B);

static void gen_ld(DisasContext *ctx)
{
    TCGv EA;
    if (Rc(ctx->opcode)) {
        if (unlikely(rA(ctx->opcode) == 0 ||
                     rA(ctx->opcode) == rD(ctx->opcode))) {
            gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
            return;
        }
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0x03);
    if (ctx->opcode & 0x02) {
        /* lwa (lwau is undefined) */
        gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
    }
    if (Rc(ctx->opcode))
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
    tcg_temp_free(EA);
}

/* lq */
static void gen_lq(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    int ra, rd;
    TCGv EA;

    /* Restore CPU state */
    if (unlikely(ctx->mem_idx == 0)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    ra = rA(ctx->opcode);
    rd = rD(ctx->opcode);
    if (unlikely((rd & 1) || rd == ra)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    if (unlikely(ctx->le_mode)) {
        /* Little-endian mode is not handled */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0x0F);
    gen_qemu_ld64(ctx, cpu_gpr[rd], EA);
    gen_addr_add(ctx, EA, EA, 8);
    gen_qemu_ld64(ctx, cpu_gpr[rd + 1], EA);
    tcg_temp_free(EA);
#endif
}
#endif /* defined(TARGET_PPC64) */
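/*
 * Usage note for gen_lq: rD must be an even register and different from rA;
 * the two doublewords are loaded into cpu_gpr[rd] and cpu_gpr[rd + 1] from a
 * quadword-aligned EA (gen_addr_imm_index masks the low 4 bits), mirroring
 * the lq constraints enforced by the checks above.
 */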
2792 /*** Integer store ***/
2793 #define GEN_ST(name, stop, opc, type) \
2794 static void glue(gen_, name)(DisasContext *ctx) \
2797 gen_set_access_type(ctx, ACCESS_INT); \
2798 EA = tcg_temp_new(); \
2799 gen_addr_imm_index(ctx, EA, 0); \
2800 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2801 tcg_temp_free(EA); \
2804 #define GEN_STU(name, stop, opc, type) \
2805 static void glue(gen_, stop##u)(DisasContext *ctx) \
2808 if (unlikely(rA(ctx->opcode) == 0)) { \
2809 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2812 gen_set_access_type(ctx, ACCESS_INT); \
2813 EA = tcg_temp_new(); \
2814 if (type == PPC_64B) \
2815 gen_addr_imm_index(ctx, EA, 0x03); \
2817 gen_addr_imm_index(ctx, EA, 0); \
2818 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2819 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2820 tcg_temp_free(EA); \
2823 #define GEN_STUX(name, stop, opc2, opc3, type) \
2824 static void glue(gen_, name##ux)(DisasContext *ctx) \
2827 if (unlikely(rA(ctx->opcode) == 0)) { \
2828 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
2831 gen_set_access_type(ctx, ACCESS_INT); \
2832 EA = tcg_temp_new(); \
2833 gen_addr_reg_index(ctx, EA); \
2834 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2835 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2836 tcg_temp_free(EA); \
2839 #define GEN_STX_E(name, stop, opc2, opc3, type, type2) \
2840 static void glue(gen_, name##x)(DisasContext *ctx) \
2843 gen_set_access_type(ctx, ACCESS_INT); \
2844 EA = tcg_temp_new(); \
2845 gen_addr_reg_index(ctx, EA); \
2846 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
2847 tcg_temp_free(EA); \
2849 #define GEN_STX(name, stop, opc2, opc3, type) \
2850 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE)
2852 #define GEN_STS(name, stop, op, type) \
2853 GEN_ST(name, stop, op | 0x20, type); \
2854 GEN_STU(name, stop, op | 0x21, type); \
2855 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
2856 GEN_STX(name, stop, 0x17, op | 0x00, type)
2858 /* stb stbu stbux stbx */
2859 GEN_STS(stb
, st8
, 0x06, PPC_INTEGER
);
2860 /* sth sthu sthux sthx */
2861 GEN_STS(sth
, st16
, 0x0C, PPC_INTEGER
);
2862 /* stw stwu stwux stwx */
2863 GEN_STS(stw
, st32
, 0x04, PPC_INTEGER
);
2864 #if defined(TARGET_PPC64)
2865 GEN_STUX(std
, st64
, 0x15, 0x05, PPC_64B
);
2866 GEN_STX(std
, st64
, 0x15, 0x04, PPC_64B
);
2868 static void gen_std(DisasContext
*ctx
)
2873 rs
= rS(ctx
->opcode
);
2874 if ((ctx
->opcode
& 0x3) == 0x2) {
2875 #if defined(CONFIG_USER_ONLY)
2876 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
2879 if (unlikely(ctx
->mem_idx
== 0)) {
2880 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
2883 if (unlikely(rs
& 1)) {
2884 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
2887 if (unlikely(ctx
->le_mode
)) {
2888 /* Little-endian mode is not handled */
2889 gen_exception_err(ctx
, POWERPC_EXCP_ALIGN
, POWERPC_EXCP_ALIGN_LE
);
2892 gen_set_access_type(ctx
, ACCESS_INT
);
2893 EA
= tcg_temp_new();
2894 gen_addr_imm_index(ctx
, EA
, 0x03);
2895 gen_qemu_st64(ctx
, cpu_gpr
[rs
], EA
);
2896 gen_addr_add(ctx
, EA
, EA
, 8);
2897 gen_qemu_st64(ctx
, cpu_gpr
[rs
+1], EA
);
2902 if (Rc(ctx
->opcode
)) {
2903 if (unlikely(rA(ctx
->opcode
) == 0)) {
2904 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
2908 gen_set_access_type(ctx
, ACCESS_INT
);
2909 EA
= tcg_temp_new();
2910 gen_addr_imm_index(ctx
, EA
, 0x03);
2911 gen_qemu_st64(ctx
, cpu_gpr
[rs
], EA
);
2912 if (Rc(ctx
->opcode
))
2913 tcg_gen_mov_tl(cpu_gpr
[rA(ctx
->opcode
)], EA
);
2918 /*** Integer load and store with byte reverse ***/
2920 static inline void gen_qemu_ld16ur(DisasContext
*ctx
, TCGv arg1
, TCGv arg2
)
2922 tcg_gen_qemu_ld16u(arg1
, arg2
, ctx
->mem_idx
);
2923 if (likely(!ctx
->le_mode
)) {
2924 tcg_gen_bswap16_tl(arg1
, arg1
);
2927 GEN_LDX(lhbr
, ld16ur
, 0x16, 0x18, PPC_INTEGER
);
2930 static inline void gen_qemu_ld32ur(DisasContext
*ctx
, TCGv arg1
, TCGv arg2
)
2932 tcg_gen_qemu_ld32u(arg1
, arg2
, ctx
->mem_idx
);
2933 if (likely(!ctx
->le_mode
)) {
2934 tcg_gen_bswap32_tl(arg1
, arg1
);
2937 GEN_LDX(lwbr
, ld32ur
, 0x16, 0x10, PPC_INTEGER
);
2939 #if defined(TARGET_PPC64)
2941 static inline void gen_qemu_ld64ur(DisasContext
*ctx
, TCGv arg1
, TCGv arg2
)
2943 tcg_gen_qemu_ld64(arg1
, arg2
, ctx
->mem_idx
);
2944 if (likely(!ctx
->le_mode
)) {
2945 tcg_gen_bswap64_tl(arg1
, arg1
);
2948 GEN_LDX_E(ldbr
, ld64ur
, 0x14, 0x10, PPC_NONE
, PPC2_DBRX
);
2949 #endif /* TARGET_PPC64 */
2952 static inline void gen_qemu_st16r(DisasContext
*ctx
, TCGv arg1
, TCGv arg2
)
2954 if (likely(!ctx
->le_mode
)) {
2955 TCGv t0
= tcg_temp_new();
2956 tcg_gen_ext16u_tl(t0
, arg1
);
2957 tcg_gen_bswap16_tl(t0
, t0
);
2958 tcg_gen_qemu_st16(t0
, arg2
, ctx
->mem_idx
);
2961 tcg_gen_qemu_st16(arg1
, arg2
, ctx
->mem_idx
);
2964 GEN_STX(sthbr
, st16r
, 0x16, 0x1C, PPC_INTEGER
);
2967 static inline void gen_qemu_st32r(DisasContext
*ctx
, TCGv arg1
, TCGv arg2
)
2969 if (likely(!ctx
->le_mode
)) {
2970 TCGv t0
= tcg_temp_new();
2971 tcg_gen_ext32u_tl(t0
, arg1
);
2972 tcg_gen_bswap32_tl(t0
, t0
);
2973 tcg_gen_qemu_st32(t0
, arg2
, ctx
->mem_idx
);
2976 tcg_gen_qemu_st32(arg1
, arg2
, ctx
->mem_idx
);
2979 GEN_STX(stwbr
, st32r
, 0x16, 0x14, PPC_INTEGER
);
2981 #if defined(TARGET_PPC64)
2983 static inline void gen_qemu_st64r(DisasContext
*ctx
, TCGv arg1
, TCGv arg2
)
2985 if (likely(!ctx
->le_mode
)) {
2986 TCGv t0
= tcg_temp_new();
2987 tcg_gen_bswap64_tl(t0
, arg1
);
2988 tcg_gen_qemu_st64(t0
, arg2
, ctx
->mem_idx
);
2991 tcg_gen_qemu_st64(arg1
, arg2
, ctx
->mem_idx
);
2994 GEN_STX_E(stdbr
, st64r
, 0x14, 0x14, PPC_NONE
, PPC2_DBRX
);
2995 #endif /* TARGET_PPC64 */
2997 /*** Integer load and store multiple ***/
3000 static void gen_lmw(DisasContext
*ctx
)
3004 gen_set_access_type(ctx
, ACCESS_INT
);
3005 /* NIP cannot be restored if the memory exception comes from an helper */
3006 gen_update_nip(ctx
, ctx
->nip
- 4);
3007 t0
= tcg_temp_new();
3008 t1
= tcg_const_i32(rD(ctx
->opcode
));
3009 gen_addr_imm_index(ctx
, t0
, 0);
3010 gen_helper_lmw(cpu_env
, t0
, t1
);
3012 tcg_temp_free_i32(t1
);
3016 static void gen_stmw(DisasContext
*ctx
)
3020 gen_set_access_type(ctx
, ACCESS_INT
);
3021 /* NIP cannot be restored if the memory exception comes from an helper */
3022 gen_update_nip(ctx
, ctx
->nip
- 4);
3023 t0
= tcg_temp_new();
3024 t1
= tcg_const_i32(rS(ctx
->opcode
));
3025 gen_addr_imm_index(ctx
, t0
, 0);
3026 gen_helper_stmw(cpu_env
, t0
, t1
);
3028 tcg_temp_free_i32(t1
);
3031 /*** Integer load and store strings ***/
3034 /* PowerPC32 specification says we must generate an exception if
3035 * rA is in the range of registers to be loaded.
3036 * In an other hand, IBM says this is valid, but rA won't be loaded.
3037 * For now, I'll follow the spec...
3039 static void gen_lswi(DisasContext
*ctx
)
3043 int nb
= NB(ctx
->opcode
);
3044 int start
= rD(ctx
->opcode
);
3045 int ra
= rA(ctx
->opcode
);
3051 if (unlikely(((start
+ nr
) > 32 &&
3052 start
<= ra
&& (start
+ nr
- 32) > ra
) ||
3053 ((start
+ nr
) <= 32 && start
<= ra
&& (start
+ nr
) > ra
))) {
3054 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_LSWX
);
3057 gen_set_access_type(ctx
, ACCESS_INT
);
3058 /* NIP cannot be restored if the memory exception comes from an helper */
3059 gen_update_nip(ctx
, ctx
->nip
- 4);
3060 t0
= tcg_temp_new();
3061 gen_addr_register(ctx
, t0
);
3062 t1
= tcg_const_i32(nb
);
3063 t2
= tcg_const_i32(start
);
3064 gen_helper_lsw(cpu_env
, t0
, t1
, t2
);
3066 tcg_temp_free_i32(t1
);
3067 tcg_temp_free_i32(t2
);
3071 static void gen_lswx(DisasContext
*ctx
)
3074 TCGv_i32 t1
, t2
, t3
;
3075 gen_set_access_type(ctx
, ACCESS_INT
);
3076 /* NIP cannot be restored if the memory exception comes from an helper */
3077 gen_update_nip(ctx
, ctx
->nip
- 4);
3078 t0
= tcg_temp_new();
3079 gen_addr_reg_index(ctx
, t0
);
3080 t1
= tcg_const_i32(rD(ctx
->opcode
));
3081 t2
= tcg_const_i32(rA(ctx
->opcode
));
3082 t3
= tcg_const_i32(rB(ctx
->opcode
));
3083 gen_helper_lswx(cpu_env
, t0
, t1
, t2
, t3
);
3085 tcg_temp_free_i32(t1
);
3086 tcg_temp_free_i32(t2
);
3087 tcg_temp_free_i32(t3
);
3091 static void gen_stswi(DisasContext
*ctx
)
3095 int nb
= NB(ctx
->opcode
);
3096 gen_set_access_type(ctx
, ACCESS_INT
);
3097 /* NIP cannot be restored if the memory exception comes from an helper */
3098 gen_update_nip(ctx
, ctx
->nip
- 4);
3099 t0
= tcg_temp_new();
3100 gen_addr_register(ctx
, t0
);
3103 t1
= tcg_const_i32(nb
);
3104 t2
= tcg_const_i32(rS(ctx
->opcode
));
3105 gen_helper_stsw(cpu_env
, t0
, t1
, t2
);
3107 tcg_temp_free_i32(t1
);
3108 tcg_temp_free_i32(t2
);
3112 static void gen_stswx(DisasContext
*ctx
)
3116 gen_set_access_type(ctx
, ACCESS_INT
);
3117 /* NIP cannot be restored if the memory exception comes from an helper */
3118 gen_update_nip(ctx
, ctx
->nip
- 4);
3119 t0
= tcg_temp_new();
3120 gen_addr_reg_index(ctx
, t0
);
3121 t1
= tcg_temp_new_i32();
3122 tcg_gen_trunc_tl_i32(t1
, cpu_xer
);
3123 tcg_gen_andi_i32(t1
, t1
, 0x7F);
3124 t2
= tcg_const_i32(rS(ctx
->opcode
));
3125 gen_helper_stsw(cpu_env
, t0
, t1
, t2
);
3127 tcg_temp_free_i32(t1
);
3128 tcg_temp_free_i32(t2
);
3131 /*** Memory synchronisation ***/
3133 static void gen_eieio(DisasContext
*ctx
)
3138 static void gen_isync(DisasContext
*ctx
)
3140 gen_stop_exception(ctx
);
3144 static void gen_lwarx(DisasContext
*ctx
)
3147 TCGv gpr
= cpu_gpr
[rD(ctx
->opcode
)];
3148 gen_set_access_type(ctx
, ACCESS_RES
);
3149 t0
= tcg_temp_local_new();
3150 gen_addr_reg_index(ctx
, t0
);
3151 gen_check_align(ctx
, t0
, 0x03);
3152 gen_qemu_ld32u(ctx
, gpr
, t0
);
3153 tcg_gen_mov_tl(cpu_reserve
, t0
);
3154 tcg_gen_st_tl(gpr
, cpu_env
, offsetof(CPUPPCState
, reserve_val
));
3158 #if defined(CONFIG_USER_ONLY)
3159 static void gen_conditional_store (DisasContext
*ctx
, TCGv EA
,
3162 TCGv t0
= tcg_temp_new();
3163 uint32_t save_exception
= ctx
->exception
;
3165 tcg_gen_st_tl(EA
, cpu_env
, offsetof(CPUPPCState
, reserve_ea
));
3166 tcg_gen_movi_tl(t0
, (size
<< 5) | reg
);
3167 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUPPCState
, reserve_info
));
3169 gen_update_nip(ctx
, ctx
->nip
-4);
3170 ctx
->exception
= POWERPC_EXCP_BRANCH
;
3171 gen_exception(ctx
, POWERPC_EXCP_STCX
);
3172 ctx
->exception
= save_exception
;
3177 static void gen_stwcx_(DisasContext
*ctx
)
3180 gen_set_access_type(ctx
, ACCESS_RES
);
3181 t0
= tcg_temp_local_new();
3182 gen_addr_reg_index(ctx
, t0
);
3183 gen_check_align(ctx
, t0
, 0x03);
3184 #if defined(CONFIG_USER_ONLY)
3185 gen_conditional_store(ctx
, t0
, rS(ctx
->opcode
), 4);
3190 tcg_gen_trunc_tl_i32(cpu_crf
[0], cpu_so
);
3191 l1
= gen_new_label();
3192 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, cpu_reserve
, l1
);
3193 tcg_gen_ori_i32(cpu_crf
[0], cpu_crf
[0], 1 << CRF_EQ
);
3194 gen_qemu_st32(ctx
, cpu_gpr
[rS(ctx
->opcode
)], t0
);
3196 tcg_gen_movi_tl(cpu_reserve
, -1);
3202 #if defined(TARGET_PPC64)
3204 static void gen_ldarx(DisasContext
*ctx
)
3207 TCGv gpr
= cpu_gpr
[rD(ctx
->opcode
)];
3208 gen_set_access_type(ctx
, ACCESS_RES
);
3209 t0
= tcg_temp_local_new();
3210 gen_addr_reg_index(ctx
, t0
);
3211 gen_check_align(ctx
, t0
, 0x07);
3212 gen_qemu_ld64(ctx
, gpr
, t0
);
3213 tcg_gen_mov_tl(cpu_reserve
, t0
);
3214 tcg_gen_st_tl(gpr
, cpu_env
, offsetof(CPUPPCState
, reserve_val
));
3219 static void gen_stdcx_(DisasContext
*ctx
)
3222 gen_set_access_type(ctx
, ACCESS_RES
);
3223 t0
= tcg_temp_local_new();
3224 gen_addr_reg_index(ctx
, t0
);
3225 gen_check_align(ctx
, t0
, 0x07);
3226 #if defined(CONFIG_USER_ONLY)
3227 gen_conditional_store(ctx
, t0
, rS(ctx
->opcode
), 8);
3231 tcg_gen_trunc_tl_i32(cpu_crf
[0], cpu_so
);
3232 l1
= gen_new_label();
3233 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, cpu_reserve
, l1
);
3234 tcg_gen_ori_i32(cpu_crf
[0], cpu_crf
[0], 1 << CRF_EQ
);
3235 gen_qemu_st64(ctx
, cpu_gpr
[rS(ctx
->opcode
)], t0
);
3237 tcg_gen_movi_tl(cpu_reserve
, -1);
3242 #endif /* defined(TARGET_PPC64) */
3245 static void gen_sync(DisasContext
*ctx
)
3250 static void gen_wait(DisasContext
*ctx
)
3252 TCGv_i32 t0
= tcg_temp_new_i32();
3253 tcg_gen_st_i32(t0
, cpu_env
,
3254 -offsetof(PowerPCCPU
, env
) + offsetof(CPUState
, halted
));
3255 tcg_temp_free_i32(t0
);
3256 /* Stop translation, as the CPU is supposed to sleep from now */
3257 gen_exception_err(ctx
, EXCP_HLT
, 1);
3260 /*** Floating-point load ***/
3261 #define GEN_LDF(name, ldop, opc, type) \
3262 static void glue(gen_, name)(DisasContext *ctx) \
3265 if (unlikely(!ctx->fpu_enabled)) { \
3266 gen_exception(ctx, POWERPC_EXCP_FPU); \
3269 gen_set_access_type(ctx, ACCESS_FLOAT); \
3270 EA = tcg_temp_new(); \
3271 gen_addr_imm_index(ctx, EA, 0); \
3272 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3273 tcg_temp_free(EA); \
3276 #define GEN_LDUF(name, ldop, opc, type) \
3277 static void glue(gen_, name##u)(DisasContext *ctx) \
3280 if (unlikely(!ctx->fpu_enabled)) { \
3281 gen_exception(ctx, POWERPC_EXCP_FPU); \
3284 if (unlikely(rA(ctx->opcode) == 0)) { \
3285 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3288 gen_set_access_type(ctx, ACCESS_FLOAT); \
3289 EA = tcg_temp_new(); \
3290 gen_addr_imm_index(ctx, EA, 0); \
3291 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3292 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3293 tcg_temp_free(EA); \
3296 #define GEN_LDUXF(name, ldop, opc, type) \
3297 static void glue(gen_, name##ux)(DisasContext *ctx) \
3300 if (unlikely(!ctx->fpu_enabled)) { \
3301 gen_exception(ctx, POWERPC_EXCP_FPU); \
3304 if (unlikely(rA(ctx->opcode) == 0)) { \
3305 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3308 gen_set_access_type(ctx, ACCESS_FLOAT); \
3309 EA = tcg_temp_new(); \
3310 gen_addr_reg_index(ctx, EA); \
3311 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3312 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3313 tcg_temp_free(EA); \
3316 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
3317 static void glue(gen_, name##x)(DisasContext *ctx) \
3320 if (unlikely(!ctx->fpu_enabled)) { \
3321 gen_exception(ctx, POWERPC_EXCP_FPU); \
3324 gen_set_access_type(ctx, ACCESS_FLOAT); \
3325 EA = tcg_temp_new(); \
3326 gen_addr_reg_index(ctx, EA); \
3327 gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
3328 tcg_temp_free(EA); \
3331 #define GEN_LDFS(name, ldop, op, type) \
3332 GEN_LDF(name, ldop, op | 0x20, type); \
3333 GEN_LDUF(name, ldop, op | 0x21, type); \
3334 GEN_LDUXF(name, ldop, op | 0x01, type); \
3335 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
3337 static inline void gen_qemu_ld32fs(DisasContext
*ctx
, TCGv_i64 arg1
, TCGv arg2
)
3339 TCGv t0
= tcg_temp_new();
3340 TCGv_i32 t1
= tcg_temp_new_i32();
3341 gen_qemu_ld32u(ctx
, t0
, arg2
);
3342 tcg_gen_trunc_tl_i32(t1
, t0
);
3344 gen_helper_float32_to_float64(arg1
, cpu_env
, t1
);
3345 tcg_temp_free_i32(t1
);
3348 /* lfd lfdu lfdux lfdx */
3349 GEN_LDFS(lfd
, ld64
, 0x12, PPC_FLOAT
);
3350 /* lfs lfsu lfsux lfsx */
3351 GEN_LDFS(lfs
, ld32fs
, 0x10, PPC_FLOAT
);
3354 static void gen_lfdp(DisasContext
*ctx
)
3357 if (unlikely(!ctx
->fpu_enabled
)) {
3358 gen_exception(ctx
, POWERPC_EXCP_FPU
);
3361 gen_set_access_type(ctx
, ACCESS_FLOAT
);
3362 EA
= tcg_temp_new();
3363 gen_addr_imm_index(ctx
, EA
, 0); \
3364 if (unlikely(ctx
->le_mode
)) {
3365 gen_qemu_ld64(ctx
, cpu_fpr
[rD(ctx
->opcode
) + 1], EA
);
3366 tcg_gen_addi_tl(EA
, EA
, 8);
3367 gen_qemu_ld64(ctx
, cpu_fpr
[rD(ctx
->opcode
)], EA
);
3369 gen_qemu_ld64(ctx
, cpu_fpr
[rD(ctx
->opcode
)], EA
);
3370 tcg_gen_addi_tl(EA
, EA
, 8);
3371 gen_qemu_ld64(ctx
, cpu_fpr
[rD(ctx
->opcode
) + 1], EA
);
3377 static void gen_lfdpx(DisasContext
*ctx
)
3380 if (unlikely(!ctx
->fpu_enabled
)) {
3381 gen_exception(ctx
, POWERPC_EXCP_FPU
);
3384 gen_set_access_type(ctx
, ACCESS_FLOAT
);
3385 EA
= tcg_temp_new();
3386 gen_addr_reg_index(ctx
, EA
);
3387 if (unlikely(ctx
->le_mode
)) {
3388 gen_qemu_ld64(ctx
, cpu_fpr
[rD(ctx
->opcode
) + 1], EA
);
3389 tcg_gen_addi_tl(EA
, EA
, 8);
3390 gen_qemu_ld64(ctx
, cpu_fpr
[rD(ctx
->opcode
)], EA
);
3392 gen_qemu_ld64(ctx
, cpu_fpr
[rD(ctx
->opcode
)], EA
);
3393 tcg_gen_addi_tl(EA
, EA
, 8);
3394 gen_qemu_ld64(ctx
, cpu_fpr
[rD(ctx
->opcode
) + 1], EA
);
3400 static void gen_lfiwax(DisasContext
*ctx
)
3404 if (unlikely(!ctx
->fpu_enabled
)) {
3405 gen_exception(ctx
, POWERPC_EXCP_FPU
);
3408 gen_set_access_type(ctx
, ACCESS_FLOAT
);
3409 EA
= tcg_temp_new();
3410 t0
= tcg_temp_new();
3411 gen_addr_reg_index(ctx
, EA
);
3412 gen_qemu_ld32s(ctx
, t0
, EA
);
3413 tcg_gen_ext_tl_i64(cpu_fpr
[rD(ctx
->opcode
)], t0
);
3418 /*** Floating-point store ***/
3419 #define GEN_STF(name, stop, opc, type) \
3420 static void glue(gen_, name)(DisasContext *ctx) \
3423 if (unlikely(!ctx->fpu_enabled)) { \
3424 gen_exception(ctx, POWERPC_EXCP_FPU); \
3427 gen_set_access_type(ctx, ACCESS_FLOAT); \
3428 EA = tcg_temp_new(); \
3429 gen_addr_imm_index(ctx, EA, 0); \
3430 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3431 tcg_temp_free(EA); \
3434 #define GEN_STUF(name, stop, opc, type) \
3435 static void glue(gen_, name##u)(DisasContext *ctx) \
3438 if (unlikely(!ctx->fpu_enabled)) { \
3439 gen_exception(ctx, POWERPC_EXCP_FPU); \
3442 if (unlikely(rA(ctx->opcode) == 0)) { \
3443 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3446 gen_set_access_type(ctx, ACCESS_FLOAT); \
3447 EA = tcg_temp_new(); \
3448 gen_addr_imm_index(ctx, EA, 0); \
3449 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3450 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3451 tcg_temp_free(EA); \
3454 #define GEN_STUXF(name, stop, opc, type) \
3455 static void glue(gen_, name##ux)(DisasContext *ctx) \
3458 if (unlikely(!ctx->fpu_enabled)) { \
3459 gen_exception(ctx, POWERPC_EXCP_FPU); \
3462 if (unlikely(rA(ctx->opcode) == 0)) { \
3463 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3466 gen_set_access_type(ctx, ACCESS_FLOAT); \
3467 EA = tcg_temp_new(); \
3468 gen_addr_reg_index(ctx, EA); \
3469 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3470 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3471 tcg_temp_free(EA); \
3474 #define GEN_STXF(name, stop, opc2, opc3, type) \
3475 static void glue(gen_, name##x)(DisasContext *ctx) \
3478 if (unlikely(!ctx->fpu_enabled)) { \
3479 gen_exception(ctx, POWERPC_EXCP_FPU); \
3482 gen_set_access_type(ctx, ACCESS_FLOAT); \
3483 EA = tcg_temp_new(); \
3484 gen_addr_reg_index(ctx, EA); \
3485 gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
3486 tcg_temp_free(EA); \
3489 #define GEN_STFS(name, stop, op, type) \
3490 GEN_STF(name, stop, op | 0x20, type); \
3491 GEN_STUF(name, stop, op | 0x21, type); \
3492 GEN_STUXF(name, stop, op | 0x01, type); \
3493 GEN_STXF(name, stop, 0x17, op | 0x00, type)
3495 static inline void gen_qemu_st32fs(DisasContext
*ctx
, TCGv_i64 arg1
, TCGv arg2
)
3497 TCGv_i32 t0
= tcg_temp_new_i32();
3498 TCGv t1
= tcg_temp_new();
3499 gen_helper_float64_to_float32(t0
, cpu_env
, arg1
);
3500 tcg_gen_extu_i32_tl(t1
, t0
);
3501 tcg_temp_free_i32(t0
);
3502 gen_qemu_st32(ctx
, t1
, arg2
);
3506 /* stfd stfdu stfdux stfdx */
3507 GEN_STFS(stfd
, st64
, 0x16, PPC_FLOAT
);
3508 /* stfs stfsu stfsux stfsx */
3509 GEN_STFS(stfs
, st32fs
, 0x14, PPC_FLOAT
);
3512 static void gen_stfdp(DisasContext
*ctx
)
3515 if (unlikely(!ctx
->fpu_enabled
)) {
3516 gen_exception(ctx
, POWERPC_EXCP_FPU
);
3519 gen_set_access_type(ctx
, ACCESS_FLOAT
);
3520 EA
= tcg_temp_new();
3521 gen_addr_imm_index(ctx
, EA
, 0); \
3522 if (unlikely(ctx
->le_mode
)) {
3523 gen_qemu_st64(ctx
, cpu_fpr
[rD(ctx
->opcode
) + 1], EA
);
3524 tcg_gen_addi_tl(EA
, EA
, 8);
3525 gen_qemu_st64(ctx
, cpu_fpr
[rD(ctx
->opcode
)], EA
);
3527 gen_qemu_st64(ctx
, cpu_fpr
[rD(ctx
->opcode
)], EA
);
3528 tcg_gen_addi_tl(EA
, EA
, 8);
3529 gen_qemu_st64(ctx
, cpu_fpr
[rD(ctx
->opcode
) + 1], EA
);
3535 static void gen_stfdpx(DisasContext
*ctx
)
3538 if (unlikely(!ctx
->fpu_enabled
)) {
3539 gen_exception(ctx
, POWERPC_EXCP_FPU
);
3542 gen_set_access_type(ctx
, ACCESS_FLOAT
);
3543 EA
= tcg_temp_new();
3544 gen_addr_reg_index(ctx
, EA
);
3545 if (unlikely(ctx
->le_mode
)) {
3546 gen_qemu_st64(ctx
, cpu_fpr
[rD(ctx
->opcode
) + 1], EA
);
3547 tcg_gen_addi_tl(EA
, EA
, 8);
3548 gen_qemu_st64(ctx
, cpu_fpr
[rD(ctx
->opcode
)], EA
);
3550 gen_qemu_st64(ctx
, cpu_fpr
[rD(ctx
->opcode
)], EA
);
3551 tcg_gen_addi_tl(EA
, EA
, 8);
3552 gen_qemu_st64(ctx
, cpu_fpr
[rD(ctx
->opcode
) + 1], EA
);
3558 static inline void gen_qemu_st32fiw(DisasContext
*ctx
, TCGv_i64 arg1
, TCGv arg2
)
3560 TCGv t0
= tcg_temp_new();
3561 tcg_gen_trunc_i64_tl(t0
, arg1
),
3562 gen_qemu_st32(ctx
, t0
, arg2
);
3566 GEN_STXF(stfiw
, st32fiw
, 0x17, 0x1E, PPC_FLOAT_STFIWX
);
3568 static inline void gen_update_cfar(DisasContext
*ctx
, target_ulong nip
)
3570 #if defined(TARGET_PPC64)
3572 tcg_gen_movi_tl(cpu_cfar
, nip
);
3577 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
3579 TranslationBlock
*tb
;
3581 if (NARROW_MODE(ctx
)) {
3582 dest
= (uint32_t) dest
;
3584 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
3585 likely(!ctx
->singlestep_enabled
)) {
3587 tcg_gen_movi_tl(cpu_nip
, dest
& ~3);
3588 tcg_gen_exit_tb((uintptr_t)tb
+ n
);
3590 tcg_gen_movi_tl(cpu_nip
, dest
& ~3);
3591 if (unlikely(ctx
->singlestep_enabled
)) {
3592 if ((ctx
->singlestep_enabled
&
3593 (CPU_BRANCH_STEP
| CPU_SINGLE_STEP
)) &&
3594 (ctx
->exception
== POWERPC_EXCP_BRANCH
||
3595 ctx
->exception
== POWERPC_EXCP_TRACE
)) {
3596 target_ulong tmp
= ctx
->nip
;
3598 gen_exception(ctx
, POWERPC_EXCP_TRACE
);
3601 if (ctx
->singlestep_enabled
& GDBSTUB_SINGLE_STEP
) {
3602 gen_debug_exception(ctx
);
3609 static inline void gen_setlr(DisasContext
*ctx
, target_ulong nip
)
3611 if (NARROW_MODE(ctx
)) {
3612 nip
= (uint32_t)nip
;
3614 tcg_gen_movi_tl(cpu_lr
, nip
);
3618 static void gen_b(DisasContext
*ctx
)
3620 target_ulong li
, target
;
3622 ctx
->exception
= POWERPC_EXCP_BRANCH
;
3623 /* sign extend LI */
3624 li
= LI(ctx
->opcode
);
3625 li
= (li
^ 0x02000000) - 0x02000000;
3626 if (likely(AA(ctx
->opcode
) == 0)) {
3627 target
= ctx
->nip
+ li
- 4;
3631 if (LK(ctx
->opcode
)) {
3632 gen_setlr(ctx
, ctx
->nip
);
3634 gen_update_cfar(ctx
, ctx
->nip
);
3635 gen_goto_tb(ctx
, 0, target
);
3642 static inline void gen_bcond(DisasContext
*ctx
, int type
)
3644 uint32_t bo
= BO(ctx
->opcode
);
3648 ctx
->exception
= POWERPC_EXCP_BRANCH
;
3649 if (type
== BCOND_LR
|| type
== BCOND_CTR
) {
3650 target
= tcg_temp_local_new();
3651 if (type
== BCOND_CTR
)
3652 tcg_gen_mov_tl(target
, cpu_ctr
);
3654 tcg_gen_mov_tl(target
, cpu_lr
);
3656 TCGV_UNUSED(target
);
3658 if (LK(ctx
->opcode
))
3659 gen_setlr(ctx
, ctx
->nip
);
3660 l1
= gen_new_label();
3661 if ((bo
& 0x4) == 0) {
3662 /* Decrement and test CTR */
3663 TCGv temp
= tcg_temp_new();
3664 if (unlikely(type
== BCOND_CTR
)) {
3665 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
3668 tcg_gen_subi_tl(cpu_ctr
, cpu_ctr
, 1);
3669 if (NARROW_MODE(ctx
)) {
3670 tcg_gen_ext32u_tl(temp
, cpu_ctr
);
3672 tcg_gen_mov_tl(temp
, cpu_ctr
);
3675 tcg_gen_brcondi_tl(TCG_COND_NE
, temp
, 0, l1
);
3677 tcg_gen_brcondi_tl(TCG_COND_EQ
, temp
, 0, l1
);
3679 tcg_temp_free(temp
);
3681 if ((bo
& 0x10) == 0) {
3683 uint32_t bi
= BI(ctx
->opcode
);
3684 uint32_t mask
= 1 << (3 - (bi
& 0x03));
3685 TCGv_i32 temp
= tcg_temp_new_i32();
3688 tcg_gen_andi_i32(temp
, cpu_crf
[bi
>> 2], mask
);
3689 tcg_gen_brcondi_i32(TCG_COND_EQ
, temp
, 0, l1
);
3691 tcg_gen_andi_i32(temp
, cpu_crf
[bi
>> 2], mask
);
3692 tcg_gen_brcondi_i32(TCG_COND_NE
, temp
, 0, l1
);
3694 tcg_temp_free_i32(temp
);
3696 gen_update_cfar(ctx
, ctx
->nip
);
3697 if (type
== BCOND_IM
) {
3698 target_ulong li
= (target_long
)((int16_t)(BD(ctx
->opcode
)));
3699 if (likely(AA(ctx
->opcode
) == 0)) {
3700 gen_goto_tb(ctx
, 0, ctx
->nip
+ li
- 4);
3702 gen_goto_tb(ctx
, 0, li
);
3705 gen_goto_tb(ctx
, 1, ctx
->nip
);
3707 if (NARROW_MODE(ctx
)) {
3708 tcg_gen_andi_tl(cpu_nip
, target
, (uint32_t)~3);
3710 tcg_gen_andi_tl(cpu_nip
, target
, ~3);
3714 gen_update_nip(ctx
, ctx
->nip
);
3719 static void gen_bc(DisasContext
*ctx
)
3721 gen_bcond(ctx
, BCOND_IM
);
3724 static void gen_bcctr(DisasContext
*ctx
)
3726 gen_bcond(ctx
, BCOND_CTR
);
3729 static void gen_bclr(DisasContext
*ctx
)
3731 gen_bcond(ctx
, BCOND_LR
);
3734 /*** Condition register logical ***/
3735 #define GEN_CRLOGIC(name, tcg_op, opc) \
3736 static void glue(gen_, name)(DisasContext *ctx) \
3741 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3742 t0 = tcg_temp_new_i32(); \
3744 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
3746 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
3748 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
3749 t1 = tcg_temp_new_i32(); \
3750 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3752 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
3754 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
3756 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
3757 tcg_op(t0, t0, t1); \
3758 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3759 tcg_gen_andi_i32(t0, t0, bitmask); \
3760 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3761 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
3762 tcg_temp_free_i32(t0); \
3763 tcg_temp_free_i32(t1); \
3767 GEN_CRLOGIC(crand
, tcg_gen_and_i32
, 0x08);
3769 GEN_CRLOGIC(crandc
, tcg_gen_andc_i32
, 0x04);
3771 GEN_CRLOGIC(creqv
, tcg_gen_eqv_i32
, 0x09);
3773 GEN_CRLOGIC(crnand
, tcg_gen_nand_i32
, 0x07);
3775 GEN_CRLOGIC(crnor
, tcg_gen_nor_i32
, 0x01);
3777 GEN_CRLOGIC(cror
, tcg_gen_or_i32
, 0x0E);
3779 GEN_CRLOGIC(crorc
, tcg_gen_orc_i32
, 0x0D);
3781 GEN_CRLOGIC(crxor
, tcg_gen_xor_i32
, 0x06);
3784 static void gen_mcrf(DisasContext
*ctx
)
3786 tcg_gen_mov_i32(cpu_crf
[crfD(ctx
->opcode
)], cpu_crf
[crfS(ctx
->opcode
)]);
3789 /*** System linkage ***/
3791 /* rfi (mem_idx only) */
3792 static void gen_rfi(DisasContext
*ctx
)
3794 #if defined(CONFIG_USER_ONLY)
3795 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
3797 /* Restore CPU state */
3798 if (unlikely(!ctx
->mem_idx
)) {
3799 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
3802 gen_update_cfar(ctx
, ctx
->nip
);
3803 gen_helper_rfi(cpu_env
);
3804 gen_sync_exception(ctx
);
3808 #if defined(TARGET_PPC64)
3809 static void gen_rfid(DisasContext
*ctx
)
3811 #if defined(CONFIG_USER_ONLY)
3812 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
3814 /* Restore CPU state */
3815 if (unlikely(!ctx
->mem_idx
)) {
3816 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
3819 gen_update_cfar(ctx
, ctx
->nip
);
3820 gen_helper_rfid(cpu_env
);
3821 gen_sync_exception(ctx
);
3825 static void gen_hrfid(DisasContext
*ctx
)
3827 #if defined(CONFIG_USER_ONLY)
3828 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
3830 /* Restore CPU state */
3831 if (unlikely(ctx
->mem_idx
<= 1)) {
3832 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
3835 gen_helper_hrfid(cpu_env
);
3836 gen_sync_exception(ctx
);
3842 #if defined(CONFIG_USER_ONLY)
3843 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3845 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3847 static void gen_sc(DisasContext
*ctx
)
3851 lev
= (ctx
->opcode
>> 5) & 0x7F;
3852 gen_exception_err(ctx
, POWERPC_SYSCALL
, lev
);
3858 static void gen_tw(DisasContext
*ctx
)
3860 TCGv_i32 t0
= tcg_const_i32(TO(ctx
->opcode
));
3861 /* Update the nip since this might generate a trap exception */
3862 gen_update_nip(ctx
, ctx
->nip
);
3863 gen_helper_tw(cpu_env
, cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)],
3865 tcg_temp_free_i32(t0
);
3869 static void gen_twi(DisasContext
*ctx
)
3871 TCGv t0
= tcg_const_tl(SIMM(ctx
->opcode
));
3872 TCGv_i32 t1
= tcg_const_i32(TO(ctx
->opcode
));
3873 /* Update the nip since this might generate a trap exception */
3874 gen_update_nip(ctx
, ctx
->nip
);
3875 gen_helper_tw(cpu_env
, cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
3877 tcg_temp_free_i32(t1
);
3880 #if defined(TARGET_PPC64)
3882 static void gen_td(DisasContext
*ctx
)
3884 TCGv_i32 t0
= tcg_const_i32(TO(ctx
->opcode
));
3885 /* Update the nip since this might generate a trap exception */
3886 gen_update_nip(ctx
, ctx
->nip
);
3887 gen_helper_td(cpu_env
, cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)],
3889 tcg_temp_free_i32(t0
);
3893 static void gen_tdi(DisasContext
*ctx
)
3895 TCGv t0
= tcg_const_tl(SIMM(ctx
->opcode
));
3896 TCGv_i32 t1
= tcg_const_i32(TO(ctx
->opcode
));
3897 /* Update the nip since this might generate a trap exception */
3898 gen_update_nip(ctx
, ctx
->nip
);
3899 gen_helper_td(cpu_env
, cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
3901 tcg_temp_free_i32(t1
);
3905 /*** Processor control ***/
3907 static void gen_read_xer(TCGv dst
)
3909 TCGv t0
= tcg_temp_new();
3910 TCGv t1
= tcg_temp_new();
3911 TCGv t2
= tcg_temp_new();
3912 tcg_gen_mov_tl(dst
, cpu_xer
);
3913 tcg_gen_shli_tl(t0
, cpu_so
, XER_SO
);
3914 tcg_gen_shli_tl(t1
, cpu_ov
, XER_OV
);
3915 tcg_gen_shli_tl(t2
, cpu_ca
, XER_CA
);
3916 tcg_gen_or_tl(t0
, t0
, t1
);
3917 tcg_gen_or_tl(dst
, dst
, t2
);
3918 tcg_gen_or_tl(dst
, dst
, t0
);
3924 static void gen_write_xer(TCGv src
)
3926 tcg_gen_andi_tl(cpu_xer
, src
,
3927 ~((1u << XER_SO
) | (1u << XER_OV
) | (1u << XER_CA
)));
3928 tcg_gen_shri_tl(cpu_so
, src
, XER_SO
);
3929 tcg_gen_shri_tl(cpu_ov
, src
, XER_OV
);
3930 tcg_gen_shri_tl(cpu_ca
, src
, XER_CA
);
3931 tcg_gen_andi_tl(cpu_so
, cpu_so
, 1);
3932 tcg_gen_andi_tl(cpu_ov
, cpu_ov
, 1);
3933 tcg_gen_andi_tl(cpu_ca
, cpu_ca
, 1);
3937 static void gen_mcrxr(DisasContext
*ctx
)
3939 TCGv_i32 t0
= tcg_temp_new_i32();
3940 TCGv_i32 t1
= tcg_temp_new_i32();
3941 TCGv_i32 dst
= cpu_crf
[crfD(ctx
->opcode
)];
3943 tcg_gen_trunc_tl_i32(t0
, cpu_so
);
3944 tcg_gen_trunc_tl_i32(t1
, cpu_ov
);
3945 tcg_gen_trunc_tl_i32(dst
, cpu_ca
);
3946 tcg_gen_shri_i32(t0
, t0
, 2);
3947 tcg_gen_shri_i32(t1
, t1
, 1);
3948 tcg_gen_or_i32(dst
, dst
, t0
);
3949 tcg_gen_or_i32(dst
, dst
, t1
);
3950 tcg_temp_free_i32(t0
);
3951 tcg_temp_free_i32(t1
);
3953 tcg_gen_movi_tl(cpu_so
, 0);
3954 tcg_gen_movi_tl(cpu_ov
, 0);
3955 tcg_gen_movi_tl(cpu_ca
, 0);
3959 static void gen_mfcr(DisasContext
*ctx
)
3963 if (likely(ctx
->opcode
& 0x00100000)) {
3964 crm
= CRM(ctx
->opcode
);
3965 if (likely(crm
&& ((crm
& (crm
- 1)) == 0))) {
3967 tcg_gen_extu_i32_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_crf
[7 - crn
]);
3968 tcg_gen_shli_tl(cpu_gpr
[rD(ctx
->opcode
)],
3969 cpu_gpr
[rD(ctx
->opcode
)], crn
* 4);
3972 TCGv_i32 t0
= tcg_temp_new_i32();
3973 tcg_gen_mov_i32(t0
, cpu_crf
[0]);
3974 tcg_gen_shli_i32(t0
, t0
, 4);
3975 tcg_gen_or_i32(t0
, t0
, cpu_crf
[1]);
3976 tcg_gen_shli_i32(t0
, t0
, 4);
3977 tcg_gen_or_i32(t0
, t0
, cpu_crf
[2]);
3978 tcg_gen_shli_i32(t0
, t0
, 4);
3979 tcg_gen_or_i32(t0
, t0
, cpu_crf
[3]);
3980 tcg_gen_shli_i32(t0
, t0
, 4);
3981 tcg_gen_or_i32(t0
, t0
, cpu_crf
[4]);
3982 tcg_gen_shli_i32(t0
, t0
, 4);
3983 tcg_gen_or_i32(t0
, t0
, cpu_crf
[5]);
3984 tcg_gen_shli_i32(t0
, t0
, 4);
3985 tcg_gen_or_i32(t0
, t0
, cpu_crf
[6]);
3986 tcg_gen_shli_i32(t0
, t0
, 4);
3987 tcg_gen_or_i32(t0
, t0
, cpu_crf
[7]);
3988 tcg_gen_extu_i32_tl(cpu_gpr
[rD(ctx
->opcode
)], t0
);
3989 tcg_temp_free_i32(t0
);
3994 static void gen_mfmsr(DisasContext
*ctx
)
3996 #if defined(CONFIG_USER_ONLY)
3997 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
3999 if (unlikely(!ctx
->mem_idx
)) {
4000 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4003 tcg_gen_mov_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_msr
);
4007 static void spr_noaccess(void *opaque
, int gprn
, int sprn
)
4010 sprn
= ((sprn
>> 5) & 0x1F) | ((sprn
& 0x1F) << 5);
4011 printf("ERROR: try to access SPR %d !\n", sprn
);
4014 #define SPR_NOACCESS (&spr_noaccess)
4017 static inline void gen_op_mfspr(DisasContext
*ctx
)
4019 void (*read_cb
)(void *opaque
, int gprn
, int sprn
);
4020 uint32_t sprn
= SPR(ctx
->opcode
);
4022 #if !defined(CONFIG_USER_ONLY)
4023 if (ctx
->mem_idx
== 2)
4024 read_cb
= ctx
->spr_cb
[sprn
].hea_read
;
4025 else if (ctx
->mem_idx
)
4026 read_cb
= ctx
->spr_cb
[sprn
].oea_read
;
4029 read_cb
= ctx
->spr_cb
[sprn
].uea_read
;
4030 if (likely(read_cb
!= NULL
)) {
4031 if (likely(read_cb
!= SPR_NOACCESS
)) {
4032 (*read_cb
)(ctx
, rD(ctx
->opcode
), sprn
);
4034 /* Privilege exception */
4035 /* This is a hack to avoid warnings when running Linux:
4036 * this OS breaks the PowerPC virtualisation model,
4037 * allowing userland application to read the PVR
4039 if (sprn
!= SPR_PVR
) {
4040 qemu_log("Trying to read privileged spr %d (0x%03x) at "
4041 TARGET_FMT_lx
"\n", sprn
, sprn
, ctx
->nip
- 4);
4042 printf("Trying to read privileged spr %d (0x%03x) at "
4043 TARGET_FMT_lx
"\n", sprn
, sprn
, ctx
->nip
- 4);
4045 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4049 qemu_log("Trying to read invalid spr %d (0x%03x) at "
4050 TARGET_FMT_lx
"\n", sprn
, sprn
, ctx
->nip
- 4);
4051 printf("Trying to read invalid spr %d (0x%03x) at "
4052 TARGET_FMT_lx
"\n", sprn
, sprn
, ctx
->nip
- 4);
4053 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_SPR
);
4057 static void gen_mfspr(DisasContext
*ctx
)
4063 static void gen_mftb(DisasContext
*ctx
)
4069 static void gen_mtcrf(DisasContext
*ctx
)
4073 crm
= CRM(ctx
->opcode
);
4074 if (likely((ctx
->opcode
& 0x00100000))) {
4075 if (crm
&& ((crm
& (crm
- 1)) == 0)) {
4076 TCGv_i32 temp
= tcg_temp_new_i32();
4078 tcg_gen_trunc_tl_i32(temp
, cpu_gpr
[rS(ctx
->opcode
)]);
4079 tcg_gen_shri_i32(temp
, temp
, crn
* 4);
4080 tcg_gen_andi_i32(cpu_crf
[7 - crn
], temp
, 0xf);
4081 tcg_temp_free_i32(temp
);
4084 TCGv_i32 temp
= tcg_temp_new_i32();
4085 tcg_gen_trunc_tl_i32(temp
, cpu_gpr
[rS(ctx
->opcode
)]);
4086 for (crn
= 0 ; crn
< 8 ; crn
++) {
4087 if (crm
& (1 << crn
)) {
4088 tcg_gen_shri_i32(cpu_crf
[7 - crn
], temp
, crn
* 4);
4089 tcg_gen_andi_i32(cpu_crf
[7 - crn
], cpu_crf
[7 - crn
], 0xf);
4092 tcg_temp_free_i32(temp
);
4097 #if defined(TARGET_PPC64)
4098 static void gen_mtmsrd(DisasContext
*ctx
)
4100 #if defined(CONFIG_USER_ONLY)
4101 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4103 if (unlikely(!ctx
->mem_idx
)) {
4104 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4107 if (ctx
->opcode
& 0x00010000) {
4108 /* Special form that does not need any synchronisation */
4109 TCGv t0
= tcg_temp_new();
4110 tcg_gen_andi_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], (1 << MSR_RI
) | (1 << MSR_EE
));
4111 tcg_gen_andi_tl(cpu_msr
, cpu_msr
, ~((1 << MSR_RI
) | (1 << MSR_EE
)));
4112 tcg_gen_or_tl(cpu_msr
, cpu_msr
, t0
);
4115 /* XXX: we need to update nip before the store
4116 * if we enter power saving mode, we will exit the loop
4117 * directly from ppc_store_msr
4119 gen_update_nip(ctx
, ctx
->nip
);
4120 gen_helper_store_msr(cpu_env
, cpu_gpr
[rS(ctx
->opcode
)]);
4121 /* Must stop the translation as machine state (may have) changed */
4122 /* Note that mtmsr is not always defined as context-synchronizing */
4123 gen_stop_exception(ctx
);
4129 static void gen_mtmsr(DisasContext
*ctx
)
4131 #if defined(CONFIG_USER_ONLY)
4132 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4134 if (unlikely(!ctx
->mem_idx
)) {
4135 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4138 if (ctx
->opcode
& 0x00010000) {
4139 /* Special form that does not need any synchronisation */
4140 TCGv t0
= tcg_temp_new();
4141 tcg_gen_andi_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], (1 << MSR_RI
) | (1 << MSR_EE
));
4142 tcg_gen_andi_tl(cpu_msr
, cpu_msr
, ~((1 << MSR_RI
) | (1 << MSR_EE
)));
4143 tcg_gen_or_tl(cpu_msr
, cpu_msr
, t0
);
4146 TCGv msr
= tcg_temp_new();
4148 /* XXX: we need to update nip before the store
4149 * if we enter power saving mode, we will exit the loop
4150 * directly from ppc_store_msr
4152 gen_update_nip(ctx
, ctx
->nip
);
4153 #if defined(TARGET_PPC64)
4154 tcg_gen_deposit_tl(msr
, cpu_msr
, cpu_gpr
[rS(ctx
->opcode
)], 0, 32);
4156 tcg_gen_mov_tl(msr
, cpu_gpr
[rS(ctx
->opcode
)]);
4158 gen_helper_store_msr(cpu_env
, msr
);
4159 /* Must stop the translation as machine state (may have) changed */
4160 /* Note that mtmsr is not always defined as context-synchronizing */
4161 gen_stop_exception(ctx
);
4167 static void gen_mtspr(DisasContext
*ctx
)
4169 void (*write_cb
)(void *opaque
, int sprn
, int gprn
);
4170 uint32_t sprn
= SPR(ctx
->opcode
);
4172 #if !defined(CONFIG_USER_ONLY)
4173 if (ctx
->mem_idx
== 2)
4174 write_cb
= ctx
->spr_cb
[sprn
].hea_write
;
4175 else if (ctx
->mem_idx
)
4176 write_cb
= ctx
->spr_cb
[sprn
].oea_write
;
4179 write_cb
= ctx
->spr_cb
[sprn
].uea_write
;
4180 if (likely(write_cb
!= NULL
)) {
4181 if (likely(write_cb
!= SPR_NOACCESS
)) {
4182 (*write_cb
)(ctx
, sprn
, rS(ctx
->opcode
));
4184 /* Privilege exception */
4185 qemu_log("Trying to write privileged spr %d (0x%03x) at "
4186 TARGET_FMT_lx
"\n", sprn
, sprn
, ctx
->nip
- 4);
4187 printf("Trying to write privileged spr %d (0x%03x) at "
4188 TARGET_FMT_lx
"\n", sprn
, sprn
, ctx
->nip
- 4);
4189 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4193 qemu_log("Trying to write invalid spr %d (0x%03x) at "
4194 TARGET_FMT_lx
"\n", sprn
, sprn
, ctx
->nip
- 4);
4195 printf("Trying to write invalid spr %d (0x%03x) at "
4196 TARGET_FMT_lx
"\n", sprn
, sprn
, ctx
->nip
- 4);
4197 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_SPR
);
4201 /*** Cache management ***/
4204 static void gen_dcbf(DisasContext
*ctx
)
4206 /* XXX: specification says this is treated as a load by the MMU */
4208 gen_set_access_type(ctx
, ACCESS_CACHE
);
4209 t0
= tcg_temp_new();
4210 gen_addr_reg_index(ctx
, t0
);
4211 gen_qemu_ld8u(ctx
, t0
, t0
);
4215 /* dcbi (Supervisor only) */
4216 static void gen_dcbi(DisasContext
*ctx
)
4218 #if defined(CONFIG_USER_ONLY)
4219 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4222 if (unlikely(!ctx
->mem_idx
)) {
4223 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4226 EA
= tcg_temp_new();
4227 gen_set_access_type(ctx
, ACCESS_CACHE
);
4228 gen_addr_reg_index(ctx
, EA
);
4229 val
= tcg_temp_new();
4230 /* XXX: specification says this should be treated as a store by the MMU */
4231 gen_qemu_ld8u(ctx
, val
, EA
);
4232 gen_qemu_st8(ctx
, val
, EA
);
4239 static void gen_dcbst(DisasContext
*ctx
)
4241 /* XXX: specification say this is treated as a load by the MMU */
4243 gen_set_access_type(ctx
, ACCESS_CACHE
);
4244 t0
= tcg_temp_new();
4245 gen_addr_reg_index(ctx
, t0
);
4246 gen_qemu_ld8u(ctx
, t0
, t0
);
4251 static void gen_dcbt(DisasContext
*ctx
)
4253 /* interpreted as no-op */
4254 /* XXX: specification say this is treated as a load by the MMU
4255 * but does not generate any exception
4260 static void gen_dcbtst(DisasContext
*ctx
)
4262 /* interpreted as no-op */
4263 /* XXX: specification say this is treated as a load by the MMU
4264 * but does not generate any exception
4269 static void gen_dcbz(DisasContext
*ctx
)
4272 TCGv_i32 tcgv_is_dcbzl
;
4273 int is_dcbzl
= ctx
->opcode
& 0x00200000 ? 1 : 0;
4275 gen_set_access_type(ctx
, ACCESS_CACHE
);
4276 /* NIP cannot be restored if the memory exception comes from an helper */
4277 gen_update_nip(ctx
, ctx
->nip
- 4);
4278 tcgv_addr
= tcg_temp_new();
4279 tcgv_is_dcbzl
= tcg_const_i32(is_dcbzl
);
4281 gen_addr_reg_index(ctx
, tcgv_addr
);
4282 gen_helper_dcbz(cpu_env
, tcgv_addr
, tcgv_is_dcbzl
);
4284 tcg_temp_free(tcgv_addr
);
4285 tcg_temp_free_i32(tcgv_is_dcbzl
);
4289 static void gen_dst(DisasContext
*ctx
)
4291 if (rA(ctx
->opcode
) == 0) {
4292 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_LSWX
);
4294 /* interpreted as no-op */
4299 static void gen_dstst(DisasContext
*ctx
)
4301 if (rA(ctx
->opcode
) == 0) {
4302 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_LSWX
);
4304 /* interpreted as no-op */
4310 static void gen_dss(DisasContext
*ctx
)
4312 /* interpreted as no-op */
4316 static void gen_icbi(DisasContext
*ctx
)
4319 gen_set_access_type(ctx
, ACCESS_CACHE
);
4320 /* NIP cannot be restored if the memory exception comes from an helper */
4321 gen_update_nip(ctx
, ctx
->nip
- 4);
4322 t0
= tcg_temp_new();
4323 gen_addr_reg_index(ctx
, t0
);
4324 gen_helper_icbi(cpu_env
, t0
);
4330 static void gen_dcba(DisasContext
*ctx
)
4332 /* interpreted as no-op */
4333 /* XXX: specification say this is treated as a store by the MMU
4334 * but does not generate any exception
4338 /*** Segment register manipulation ***/
4339 /* Supervisor only: */
4342 static void gen_mfsr(DisasContext
*ctx
)
4344 #if defined(CONFIG_USER_ONLY)
4345 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4348 if (unlikely(!ctx
->mem_idx
)) {
4349 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4352 t0
= tcg_const_tl(SR(ctx
->opcode
));
4353 gen_helper_load_sr(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, t0
);
4359 static void gen_mfsrin(DisasContext
*ctx
)
4361 #if defined(CONFIG_USER_ONLY)
4362 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4365 if (unlikely(!ctx
->mem_idx
)) {
4366 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4369 t0
= tcg_temp_new();
4370 tcg_gen_shri_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 28);
4371 tcg_gen_andi_tl(t0
, t0
, 0xF);
4372 gen_helper_load_sr(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, t0
);
4378 static void gen_mtsr(DisasContext
*ctx
)
4380 #if defined(CONFIG_USER_ONLY)
4381 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4384 if (unlikely(!ctx
->mem_idx
)) {
4385 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4388 t0
= tcg_const_tl(SR(ctx
->opcode
));
4389 gen_helper_store_sr(cpu_env
, t0
, cpu_gpr
[rS(ctx
->opcode
)]);
4395 static void gen_mtsrin(DisasContext
*ctx
)
4397 #if defined(CONFIG_USER_ONLY)
4398 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4401 if (unlikely(!ctx
->mem_idx
)) {
4402 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4405 t0
= tcg_temp_new();
4406 tcg_gen_shri_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 28);
4407 tcg_gen_andi_tl(t0
, t0
, 0xF);
4408 gen_helper_store_sr(cpu_env
, t0
, cpu_gpr
[rD(ctx
->opcode
)]);
#if defined(TARGET_PPC64)
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4417 static void gen_mfsr_64b(DisasContext
*ctx
)
4419 #if defined(CONFIG_USER_ONLY)
4420 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4423 if (unlikely(!ctx
->mem_idx
)) {
4424 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4427 t0
= tcg_const_tl(SR(ctx
->opcode
));
4428 gen_helper_load_sr(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, t0
);
4434 static void gen_mfsrin_64b(DisasContext
*ctx
)
4436 #if defined(CONFIG_USER_ONLY)
4437 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4440 if (unlikely(!ctx
->mem_idx
)) {
4441 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4444 t0
= tcg_temp_new();
4445 tcg_gen_shri_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 28);
4446 tcg_gen_andi_tl(t0
, t0
, 0xF);
4447 gen_helper_load_sr(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, t0
);
4453 static void gen_mtsr_64b(DisasContext
*ctx
)
4455 #if defined(CONFIG_USER_ONLY)
4456 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4459 if (unlikely(!ctx
->mem_idx
)) {
4460 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4463 t0
= tcg_const_tl(SR(ctx
->opcode
));
4464 gen_helper_store_sr(cpu_env
, t0
, cpu_gpr
[rS(ctx
->opcode
)]);
4470 static void gen_mtsrin_64b(DisasContext
*ctx
)
4472 #if defined(CONFIG_USER_ONLY)
4473 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4476 if (unlikely(!ctx
->mem_idx
)) {
4477 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4480 t0
= tcg_temp_new();
4481 tcg_gen_shri_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 28);
4482 tcg_gen_andi_tl(t0
, t0
, 0xF);
4483 gen_helper_store_sr(cpu_env
, t0
, cpu_gpr
[rS(ctx
->opcode
)]);
4489 static void gen_slbmte(DisasContext
*ctx
)
4491 #if defined(CONFIG_USER_ONLY)
4492 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4494 if (unlikely(!ctx
->mem_idx
)) {
4495 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4498 gen_helper_store_slb(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)],
4499 cpu_gpr
[rS(ctx
->opcode
)]);
4503 static void gen_slbmfee(DisasContext
*ctx
)
4505 #if defined(CONFIG_USER_ONLY)
4506 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4508 if (unlikely(!ctx
->mem_idx
)) {
4509 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4512 gen_helper_load_slb_esid(cpu_gpr
[rS(ctx
->opcode
)], cpu_env
,
4513 cpu_gpr
[rB(ctx
->opcode
)]);
4517 static void gen_slbmfev(DisasContext
*ctx
)
4519 #if defined(CONFIG_USER_ONLY)
4520 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4522 if (unlikely(!ctx
->mem_idx
)) {
4523 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
4526 gen_helper_load_slb_vsid(cpu_gpr
[rS(ctx
->opcode
)], cpu_env
,
4527 cpu_gpr
[rB(ctx
->opcode
)]);
#endif /* defined(TARGET_PPC64) */

/*** Lookaside buffer management ***/
/* Optional & mem_idx only: */
4536 static void gen_tlbia(DisasContext
*ctx
)
4538 #if defined(CONFIG_USER_ONLY)
4539 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4541 if (unlikely(!ctx
->mem_idx
)) {
4542 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4545 gen_helper_tlbia(cpu_env
);
4550 static void gen_tlbiel(DisasContext
*ctx
)
4552 #if defined(CONFIG_USER_ONLY)
4553 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4555 if (unlikely(!ctx
->mem_idx
)) {
4556 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4559 gen_helper_tlbie(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)]);
4564 static void gen_tlbie(DisasContext
*ctx
)
4566 #if defined(CONFIG_USER_ONLY)
4567 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4569 if (unlikely(!ctx
->mem_idx
)) {
4570 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4573 if (NARROW_MODE(ctx
)) {
4574 TCGv t0
= tcg_temp_new();
4575 tcg_gen_ext32u_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)]);
4576 gen_helper_tlbie(cpu_env
, t0
);
4579 gen_helper_tlbie(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)]);
4585 static void gen_tlbsync(DisasContext
*ctx
)
4587 #if defined(CONFIG_USER_ONLY)
4588 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4590 if (unlikely(!ctx
->mem_idx
)) {
4591 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4594 /* This has no effect: it should ensure that all previous
4595 * tlbie have completed
4597 gen_stop_exception(ctx
);
4601 #if defined(TARGET_PPC64)
4603 static void gen_slbia(DisasContext
*ctx
)
4605 #if defined(CONFIG_USER_ONLY)
4606 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4608 if (unlikely(!ctx
->mem_idx
)) {
4609 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4612 gen_helper_slbia(cpu_env
);
4617 static void gen_slbie(DisasContext
*ctx
)
4619 #if defined(CONFIG_USER_ONLY)
4620 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4622 if (unlikely(!ctx
->mem_idx
)) {
4623 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
4626 gen_helper_slbie(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)]);
/*** External control ***/
/* Optional: */

/* eciwx */
static void gen_eciwx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x03);
    gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}

/* ecowx */
static void gen_ecowx(DisasContext *ctx)
{
    TCGv t0;
    /* Should check EAR[E] ! */
    gen_set_access_type(ctx, ACCESS_EXT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_check_align(ctx, t0, 0x03);
    gen_qemu_st32(ctx, cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}
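/*
 * Note on eciwx/ecowx above: gen_check_align(ctx, t0, 0x03) rejects any
 * effective address with either of the low two bits set, i.e. the external
 * access must be word aligned.  Roughly (a sketch, not the actual helper):
 *
 *     if (EA & 0x03) {
 *         // raise an alignment exception instead of doing the access
 *     }
 */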

/* PowerPC 601 specific instructions */

/* abs - abs. */
static void gen_abs(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)], 0, l1);
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
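/*
 * The label pair in gen_abs above is just a branchy absolute value:
 * rD = (rA >= 0) ? rA : -rA, with CR0 updated afterwards when Rc=1.
 */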
4678 static void gen_abso(DisasContext
*ctx
)
4680 int l1
= gen_new_label();
4681 int l2
= gen_new_label();
4682 int l3
= gen_new_label();
4683 /* Start with XER OV disabled, the most likely case */
4684 tcg_gen_movi_tl(cpu_ov
, 0);
4685 tcg_gen_brcondi_tl(TCG_COND_GE
, cpu_gpr
[rA(ctx
->opcode
)], 0, l2
);
4686 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[rA(ctx
->opcode
)], 0x80000000, l1
);
4687 tcg_gen_movi_tl(cpu_ov
, 1);
4688 tcg_gen_movi_tl(cpu_so
, 1);
4691 tcg_gen_neg_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4694 tcg_gen_mov_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4696 if (unlikely(Rc(ctx
->opcode
) != 0))
4697 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4701 static void gen_clcs(DisasContext
*ctx
)
4703 TCGv_i32 t0
= tcg_const_i32(rA(ctx
->opcode
));
4704 gen_helper_clcs(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, t0
);
4705 tcg_temp_free_i32(t0
);
4706 /* Rc=1 sets CR0 to an undefined state */
4710 static void gen_div(DisasContext
*ctx
)
4712 gen_helper_div(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, cpu_gpr
[rA(ctx
->opcode
)],
4713 cpu_gpr
[rB(ctx
->opcode
)]);
4714 if (unlikely(Rc(ctx
->opcode
) != 0))
4715 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4719 static void gen_divo(DisasContext
*ctx
)
4721 gen_helper_divo(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, cpu_gpr
[rA(ctx
->opcode
)],
4722 cpu_gpr
[rB(ctx
->opcode
)]);
4723 if (unlikely(Rc(ctx
->opcode
) != 0))
4724 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4728 static void gen_divs(DisasContext
*ctx
)
4730 gen_helper_divs(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, cpu_gpr
[rA(ctx
->opcode
)],
4731 cpu_gpr
[rB(ctx
->opcode
)]);
4732 if (unlikely(Rc(ctx
->opcode
) != 0))
4733 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4736 /* divso - divso. */
4737 static void gen_divso(DisasContext
*ctx
)
4739 gen_helper_divso(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
,
4740 cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)]);
4741 if (unlikely(Rc(ctx
->opcode
) != 0))
4742 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4746 static void gen_doz(DisasContext
*ctx
)
4748 int l1
= gen_new_label();
4749 int l2
= gen_new_label();
4750 tcg_gen_brcond_tl(TCG_COND_GE
, cpu_gpr
[rB(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)], l1
);
4751 tcg_gen_sub_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4754 tcg_gen_movi_tl(cpu_gpr
[rD(ctx
->opcode
)], 0);
4756 if (unlikely(Rc(ctx
->opcode
) != 0))
4757 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4761 static void gen_dozo(DisasContext
*ctx
)
4763 int l1
= gen_new_label();
4764 int l2
= gen_new_label();
4765 TCGv t0
= tcg_temp_new();
4766 TCGv t1
= tcg_temp_new();
4767 TCGv t2
= tcg_temp_new();
4768 /* Start with XER OV disabled, the most likely case */
4769 tcg_gen_movi_tl(cpu_ov
, 0);
4770 tcg_gen_brcond_tl(TCG_COND_GE
, cpu_gpr
[rB(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)], l1
);
4771 tcg_gen_sub_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4772 tcg_gen_xor_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4773 tcg_gen_xor_tl(t2
, cpu_gpr
[rA(ctx
->opcode
)], t0
);
4774 tcg_gen_andc_tl(t1
, t1
, t2
);
4775 tcg_gen_mov_tl(cpu_gpr
[rD(ctx
->opcode
)], t0
);
4776 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l2
);
4777 tcg_gen_movi_tl(cpu_ov
, 1);
4778 tcg_gen_movi_tl(cpu_so
, 1);
4781 tcg_gen_movi_tl(cpu_gpr
[rD(ctx
->opcode
)], 0);
4786 if (unlikely(Rc(ctx
->opcode
) != 0))
4787 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4791 static void gen_dozi(DisasContext
*ctx
)
4793 target_long simm
= SIMM(ctx
->opcode
);
4794 int l1
= gen_new_label();
4795 int l2
= gen_new_label();
4796 tcg_gen_brcondi_tl(TCG_COND_LT
, cpu_gpr
[rA(ctx
->opcode
)], simm
, l1
);
4797 tcg_gen_subfi_tl(cpu_gpr
[rD(ctx
->opcode
)], simm
, cpu_gpr
[rA(ctx
->opcode
)]);
4800 tcg_gen_movi_tl(cpu_gpr
[rD(ctx
->opcode
)], 0);
4802 if (unlikely(Rc(ctx
->opcode
) != 0))
4803 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4806 /* lscbx - lscbx. */
4807 static void gen_lscbx(DisasContext
*ctx
)
4809 TCGv t0
= tcg_temp_new();
4810 TCGv_i32 t1
= tcg_const_i32(rD(ctx
->opcode
));
4811 TCGv_i32 t2
= tcg_const_i32(rA(ctx
->opcode
));
4812 TCGv_i32 t3
= tcg_const_i32(rB(ctx
->opcode
));
4814 gen_addr_reg_index(ctx
, t0
);
4815 /* NIP cannot be restored if the memory exception comes from an helper */
4816 gen_update_nip(ctx
, ctx
->nip
- 4);
4817 gen_helper_lscbx(t0
, cpu_env
, t0
, t1
, t2
, t3
);
4818 tcg_temp_free_i32(t1
);
4819 tcg_temp_free_i32(t2
);
4820 tcg_temp_free_i32(t3
);
4821 tcg_gen_andi_tl(cpu_xer
, cpu_xer
, ~0x7F);
4822 tcg_gen_or_tl(cpu_xer
, cpu_xer
, t0
);
4823 if (unlikely(Rc(ctx
->opcode
) != 0))
4824 gen_set_Rc0(ctx
, t0
);
4828 /* maskg - maskg. */
4829 static void gen_maskg(DisasContext
*ctx
)
4831 int l1
= gen_new_label();
4832 TCGv t0
= tcg_temp_new();
4833 TCGv t1
= tcg_temp_new();
4834 TCGv t2
= tcg_temp_new();
4835 TCGv t3
= tcg_temp_new();
4836 tcg_gen_movi_tl(t3
, 0xFFFFFFFF);
4837 tcg_gen_andi_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
4838 tcg_gen_andi_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], 0x1F);
4839 tcg_gen_addi_tl(t2
, t0
, 1);
4840 tcg_gen_shr_tl(t2
, t3
, t2
);
4841 tcg_gen_shr_tl(t3
, t3
, t1
);
4842 tcg_gen_xor_tl(cpu_gpr
[rA(ctx
->opcode
)], t2
, t3
);
4843 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
4844 tcg_gen_neg_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4850 if (unlikely(Rc(ctx
->opcode
) != 0))
4851 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
4854 /* maskir - maskir. */
4855 static void gen_maskir(DisasContext
*ctx
)
4857 TCGv t0
= tcg_temp_new();
4858 TCGv t1
= tcg_temp_new();
4859 tcg_gen_and_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)]);
4860 tcg_gen_andc_tl(t1
, cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)]);
4861 tcg_gen_or_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
4864 if (unlikely(Rc(ctx
->opcode
) != 0))
4865 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
4869 static void gen_mul(DisasContext
*ctx
)
4871 TCGv_i64 t0
= tcg_temp_new_i64();
4872 TCGv_i64 t1
= tcg_temp_new_i64();
4873 TCGv t2
= tcg_temp_new();
4874 tcg_gen_extu_tl_i64(t0
, cpu_gpr
[rA(ctx
->opcode
)]);
4875 tcg_gen_extu_tl_i64(t1
, cpu_gpr
[rB(ctx
->opcode
)]);
4876 tcg_gen_mul_i64(t0
, t0
, t1
);
4877 tcg_gen_trunc_i64_tl(t2
, t0
);
4878 gen_store_spr(SPR_MQ
, t2
);
4879 tcg_gen_shri_i64(t1
, t0
, 32);
4880 tcg_gen_trunc_i64_tl(cpu_gpr
[rD(ctx
->opcode
)], t1
);
4881 tcg_temp_free_i64(t0
);
4882 tcg_temp_free_i64(t1
);
4884 if (unlikely(Rc(ctx
->opcode
) != 0))
4885 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4889 static void gen_mulo(DisasContext
*ctx
)
4891 int l1
= gen_new_label();
4892 TCGv_i64 t0
= tcg_temp_new_i64();
4893 TCGv_i64 t1
= tcg_temp_new_i64();
4894 TCGv t2
= tcg_temp_new();
4895 /* Start with XER OV disabled, the most likely case */
4896 tcg_gen_movi_tl(cpu_ov
, 0);
4897 tcg_gen_extu_tl_i64(t0
, cpu_gpr
[rA(ctx
->opcode
)]);
4898 tcg_gen_extu_tl_i64(t1
, cpu_gpr
[rB(ctx
->opcode
)]);
4899 tcg_gen_mul_i64(t0
, t0
, t1
);
4900 tcg_gen_trunc_i64_tl(t2
, t0
);
4901 gen_store_spr(SPR_MQ
, t2
);
4902 tcg_gen_shri_i64(t1
, t0
, 32);
4903 tcg_gen_trunc_i64_tl(cpu_gpr
[rD(ctx
->opcode
)], t1
);
4904 tcg_gen_ext32s_i64(t1
, t0
);
4905 tcg_gen_brcond_i64(TCG_COND_EQ
, t0
, t1
, l1
);
4906 tcg_gen_movi_tl(cpu_ov
, 1);
4907 tcg_gen_movi_tl(cpu_so
, 1);
4909 tcg_temp_free_i64(t0
);
4910 tcg_temp_free_i64(t1
);
4912 if (unlikely(Rc(ctx
->opcode
) != 0))
4913 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4917 static void gen_nabs(DisasContext
*ctx
)
4919 int l1
= gen_new_label();
4920 int l2
= gen_new_label();
4921 tcg_gen_brcondi_tl(TCG_COND_GT
, cpu_gpr
[rA(ctx
->opcode
)], 0, l1
);
4922 tcg_gen_mov_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4925 tcg_gen_neg_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4927 if (unlikely(Rc(ctx
->opcode
) != 0))
4928 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4931 /* nabso - nabso. */
4932 static void gen_nabso(DisasContext
*ctx
)
4934 int l1
= gen_new_label();
4935 int l2
= gen_new_label();
4936 tcg_gen_brcondi_tl(TCG_COND_GT
, cpu_gpr
[rA(ctx
->opcode
)], 0, l1
);
4937 tcg_gen_mov_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4940 tcg_gen_neg_tl(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
4942 /* nabs never overflows */
4943 tcg_gen_movi_tl(cpu_ov
, 0);
4944 if (unlikely(Rc(ctx
->opcode
) != 0))
4945 gen_set_Rc0(ctx
, cpu_gpr
[rD(ctx
->opcode
)]);
4949 static void gen_rlmi(DisasContext
*ctx
)
4951 uint32_t mb
= MB(ctx
->opcode
);
4952 uint32_t me
= ME(ctx
->opcode
);
4953 TCGv t0
= tcg_temp_new();
4954 tcg_gen_andi_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
4955 tcg_gen_rotl_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t0
);
4956 tcg_gen_andi_tl(t0
, t0
, MASK(mb
, me
));
4957 tcg_gen_andi_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)], ~MASK(mb
, me
));
4958 tcg_gen_or_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)], t0
);
4960 if (unlikely(Rc(ctx
->opcode
) != 0))
4961 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
4965 static void gen_rrib(DisasContext
*ctx
)
4967 TCGv t0
= tcg_temp_new();
4968 TCGv t1
= tcg_temp_new();
4969 tcg_gen_andi_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
4970 tcg_gen_movi_tl(t1
, 0x80000000);
4971 tcg_gen_shr_tl(t1
, t1
, t0
);
4972 tcg_gen_shr_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t0
);
4973 tcg_gen_and_tl(t0
, t0
, t1
);
4974 tcg_gen_andc_tl(t1
, cpu_gpr
[rA(ctx
->opcode
)], t1
);
4975 tcg_gen_or_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
4978 if (unlikely(Rc(ctx
->opcode
) != 0))
4979 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
4983 static void gen_sle(DisasContext
*ctx
)
4985 TCGv t0
= tcg_temp_new();
4986 TCGv t1
= tcg_temp_new();
4987 tcg_gen_andi_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
4988 tcg_gen_shl_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t1
);
4989 tcg_gen_subfi_tl(t1
, 32, t1
);
4990 tcg_gen_shr_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], t1
);
4991 tcg_gen_or_tl(t1
, t0
, t1
);
4992 tcg_gen_mov_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
);
4993 gen_store_spr(SPR_MQ
, t1
);
4996 if (unlikely(Rc(ctx
->opcode
) != 0))
4997 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5001 static void gen_sleq(DisasContext
*ctx
)
5003 TCGv t0
= tcg_temp_new();
5004 TCGv t1
= tcg_temp_new();
5005 TCGv t2
= tcg_temp_new();
5006 tcg_gen_andi_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
5007 tcg_gen_movi_tl(t2
, 0xFFFFFFFF);
5008 tcg_gen_shl_tl(t2
, t2
, t0
);
5009 tcg_gen_rotl_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t0
);
5010 gen_load_spr(t1
, SPR_MQ
);
5011 gen_store_spr(SPR_MQ
, t0
);
5012 tcg_gen_and_tl(t0
, t0
, t2
);
5013 tcg_gen_andc_tl(t1
, t1
, t2
);
5014 tcg_gen_or_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
5018 if (unlikely(Rc(ctx
->opcode
) != 0))
5019 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5023 static void gen_sliq(DisasContext
*ctx
)
5025 int sh
= SH(ctx
->opcode
);
5026 TCGv t0
= tcg_temp_new();
5027 TCGv t1
= tcg_temp_new();
5028 tcg_gen_shli_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], sh
);
5029 tcg_gen_shri_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], 32 - sh
);
5030 tcg_gen_or_tl(t1
, t0
, t1
);
5031 tcg_gen_mov_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
);
5032 gen_store_spr(SPR_MQ
, t1
);
5035 if (unlikely(Rc(ctx
->opcode
) != 0))
5036 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5039 /* slliq - slliq. */
5040 static void gen_slliq(DisasContext
*ctx
)
5042 int sh
= SH(ctx
->opcode
);
5043 TCGv t0
= tcg_temp_new();
5044 TCGv t1
= tcg_temp_new();
5045 tcg_gen_rotli_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], sh
);
5046 gen_load_spr(t1
, SPR_MQ
);
5047 gen_store_spr(SPR_MQ
, t0
);
5048 tcg_gen_andi_tl(t0
, t0
, (0xFFFFFFFFU
<< sh
));
5049 tcg_gen_andi_tl(t1
, t1
, ~(0xFFFFFFFFU
<< sh
));
5050 tcg_gen_or_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
5053 if (unlikely(Rc(ctx
->opcode
) != 0))
5054 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5058 static void gen_sllq(DisasContext
*ctx
)
5060 int l1
= gen_new_label();
5061 int l2
= gen_new_label();
5062 TCGv t0
= tcg_temp_local_new();
5063 TCGv t1
= tcg_temp_local_new();
5064 TCGv t2
= tcg_temp_local_new();
5065 tcg_gen_andi_tl(t2
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
5066 tcg_gen_movi_tl(t1
, 0xFFFFFFFF);
5067 tcg_gen_shl_tl(t1
, t1
, t2
);
5068 tcg_gen_andi_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x20);
5069 tcg_gen_brcondi_tl(TCG_COND_EQ
, t0
, 0, l1
);
5070 gen_load_spr(t0
, SPR_MQ
);
5071 tcg_gen_and_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
5074 tcg_gen_shl_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t2
);
5075 gen_load_spr(t2
, SPR_MQ
);
5076 tcg_gen_andc_tl(t1
, t2
, t1
);
5077 tcg_gen_or_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
5082 if (unlikely(Rc(ctx
->opcode
) != 0))
5083 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5087 static void gen_slq(DisasContext
*ctx
)
5089 int l1
= gen_new_label();
5090 TCGv t0
= tcg_temp_new();
5091 TCGv t1
= tcg_temp_new();
5092 tcg_gen_andi_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
5093 tcg_gen_shl_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t1
);
5094 tcg_gen_subfi_tl(t1
, 32, t1
);
5095 tcg_gen_shr_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], t1
);
5096 tcg_gen_or_tl(t1
, t0
, t1
);
5097 gen_store_spr(SPR_MQ
, t1
);
5098 tcg_gen_andi_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], 0x20);
5099 tcg_gen_mov_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
);
5100 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5101 tcg_gen_movi_tl(cpu_gpr
[rA(ctx
->opcode
)], 0);
5105 if (unlikely(Rc(ctx
->opcode
) != 0))
5106 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5109 /* sraiq - sraiq. */
5110 static void gen_sraiq(DisasContext
*ctx
)
5112 int sh
= SH(ctx
->opcode
);
5113 int l1
= gen_new_label();
5114 TCGv t0
= tcg_temp_new();
5115 TCGv t1
= tcg_temp_new();
5116 tcg_gen_shri_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], sh
);
5117 tcg_gen_shli_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], 32 - sh
);
5118 tcg_gen_or_tl(t0
, t0
, t1
);
5119 gen_store_spr(SPR_MQ
, t0
);
5120 tcg_gen_movi_tl(cpu_ca
, 0);
5121 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
5122 tcg_gen_brcondi_tl(TCG_COND_GE
, cpu_gpr
[rS(ctx
->opcode
)], 0, l1
);
5123 tcg_gen_movi_tl(cpu_ca
, 1);
5125 tcg_gen_sari_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)], sh
);
5128 if (unlikely(Rc(ctx
->opcode
) != 0))
5129 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5133 static void gen_sraq(DisasContext
*ctx
)
5135 int l1
= gen_new_label();
5136 int l2
= gen_new_label();
5137 TCGv t0
= tcg_temp_new();
5138 TCGv t1
= tcg_temp_local_new();
5139 TCGv t2
= tcg_temp_local_new();
5140 tcg_gen_andi_tl(t2
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
5141 tcg_gen_shr_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t2
);
5142 tcg_gen_sar_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], t2
);
5143 tcg_gen_subfi_tl(t2
, 32, t2
);
5144 tcg_gen_shl_tl(t2
, cpu_gpr
[rS(ctx
->opcode
)], t2
);
5145 tcg_gen_or_tl(t0
, t0
, t2
);
5146 gen_store_spr(SPR_MQ
, t0
);
5147 tcg_gen_andi_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x20);
5148 tcg_gen_brcondi_tl(TCG_COND_EQ
, t2
, 0, l1
);
5149 tcg_gen_mov_tl(t2
, cpu_gpr
[rS(ctx
->opcode
)]);
5150 tcg_gen_sari_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], 31);
5153 tcg_gen_mov_tl(cpu_gpr
[rA(ctx
->opcode
)], t1
);
5154 tcg_gen_movi_tl(cpu_ca
, 0);
5155 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l2
);
5156 tcg_gen_brcondi_tl(TCG_COND_EQ
, t2
, 0, l2
);
5157 tcg_gen_movi_tl(cpu_ca
, 1);
5161 if (unlikely(Rc(ctx
->opcode
) != 0))
5162 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5166 static void gen_sre(DisasContext
*ctx
)
5168 TCGv t0
= tcg_temp_new();
5169 TCGv t1
= tcg_temp_new();
5170 tcg_gen_andi_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
5171 tcg_gen_shr_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t1
);
5172 tcg_gen_subfi_tl(t1
, 32, t1
);
5173 tcg_gen_shl_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], t1
);
5174 tcg_gen_or_tl(t1
, t0
, t1
);
5175 tcg_gen_mov_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
);
5176 gen_store_spr(SPR_MQ
, t1
);
5179 if (unlikely(Rc(ctx
->opcode
) != 0))
5180 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5184 static void gen_srea(DisasContext
*ctx
)
5186 TCGv t0
= tcg_temp_new();
5187 TCGv t1
= tcg_temp_new();
5188 tcg_gen_andi_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
5189 tcg_gen_rotr_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t1
);
5190 gen_store_spr(SPR_MQ
, t0
);
5191 tcg_gen_sar_tl(cpu_gpr
[rA(ctx
->opcode
)], cpu_gpr
[rS(ctx
->opcode
)], t1
);
5194 if (unlikely(Rc(ctx
->opcode
) != 0))
5195 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5199 static void gen_sreq(DisasContext
*ctx
)
5201 TCGv t0
= tcg_temp_new();
5202 TCGv t1
= tcg_temp_new();
5203 TCGv t2
= tcg_temp_new();
5204 tcg_gen_andi_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
5205 tcg_gen_movi_tl(t1
, 0xFFFFFFFF);
5206 tcg_gen_shr_tl(t1
, t1
, t0
);
5207 tcg_gen_rotr_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t0
);
5208 gen_load_spr(t2
, SPR_MQ
);
5209 gen_store_spr(SPR_MQ
, t0
);
5210 tcg_gen_and_tl(t0
, t0
, t1
);
5211 tcg_gen_andc_tl(t2
, t2
, t1
);
5212 tcg_gen_or_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t2
);
5216 if (unlikely(Rc(ctx
->opcode
) != 0))
5217 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5221 static void gen_sriq(DisasContext
*ctx
)
5223 int sh
= SH(ctx
->opcode
);
5224 TCGv t0
= tcg_temp_new();
5225 TCGv t1
= tcg_temp_new();
5226 tcg_gen_shri_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], sh
);
5227 tcg_gen_shli_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], 32 - sh
);
5228 tcg_gen_or_tl(t1
, t0
, t1
);
5229 tcg_gen_mov_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
);
5230 gen_store_spr(SPR_MQ
, t1
);
5233 if (unlikely(Rc(ctx
->opcode
) != 0))
5234 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5238 static void gen_srliq(DisasContext
*ctx
)
5240 int sh
= SH(ctx
->opcode
);
5241 TCGv t0
= tcg_temp_new();
5242 TCGv t1
= tcg_temp_new();
5243 tcg_gen_rotri_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], sh
);
5244 gen_load_spr(t1
, SPR_MQ
);
5245 gen_store_spr(SPR_MQ
, t0
);
5246 tcg_gen_andi_tl(t0
, t0
, (0xFFFFFFFFU
>> sh
));
5247 tcg_gen_andi_tl(t1
, t1
, ~(0xFFFFFFFFU
>> sh
));
5248 tcg_gen_or_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
5251 if (unlikely(Rc(ctx
->opcode
) != 0))
5252 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5256 static void gen_srlq(DisasContext
*ctx
)
5258 int l1
= gen_new_label();
5259 int l2
= gen_new_label();
5260 TCGv t0
= tcg_temp_local_new();
5261 TCGv t1
= tcg_temp_local_new();
5262 TCGv t2
= tcg_temp_local_new();
5263 tcg_gen_andi_tl(t2
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
5264 tcg_gen_movi_tl(t1
, 0xFFFFFFFF);
5265 tcg_gen_shr_tl(t2
, t1
, t2
);
5266 tcg_gen_andi_tl(t0
, cpu_gpr
[rB(ctx
->opcode
)], 0x20);
5267 tcg_gen_brcondi_tl(TCG_COND_EQ
, t0
, 0, l1
);
5268 gen_load_spr(t0
, SPR_MQ
);
5269 tcg_gen_and_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t2
);
5272 tcg_gen_shr_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t2
);
5273 tcg_gen_and_tl(t0
, t0
, t2
);
5274 gen_load_spr(t1
, SPR_MQ
);
5275 tcg_gen_andc_tl(t1
, t1
, t2
);
5276 tcg_gen_or_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
, t1
);
5281 if (unlikely(Rc(ctx
->opcode
) != 0))
5282 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5286 static void gen_srq(DisasContext
*ctx
)
5288 int l1
= gen_new_label();
5289 TCGv t0
= tcg_temp_new();
5290 TCGv t1
= tcg_temp_new();
5291 tcg_gen_andi_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], 0x1F);
5292 tcg_gen_shr_tl(t0
, cpu_gpr
[rS(ctx
->opcode
)], t1
);
5293 tcg_gen_subfi_tl(t1
, 32, t1
);
5294 tcg_gen_shl_tl(t1
, cpu_gpr
[rS(ctx
->opcode
)], t1
);
5295 tcg_gen_or_tl(t1
, t0
, t1
);
5296 gen_store_spr(SPR_MQ
, t1
);
5297 tcg_gen_andi_tl(t1
, cpu_gpr
[rB(ctx
->opcode
)], 0x20);
5298 tcg_gen_mov_tl(cpu_gpr
[rA(ctx
->opcode
)], t0
);
5299 tcg_gen_brcondi_tl(TCG_COND_EQ
, t0
, 0, l1
);
5300 tcg_gen_movi_tl(cpu_gpr
[rA(ctx
->opcode
)], 0);
5304 if (unlikely(Rc(ctx
->opcode
) != 0))
5305 gen_set_Rc0(ctx
, cpu_gpr
[rA(ctx
->opcode
)]);
5308 /* PowerPC 602 specific instructions */
5311 static void gen_dsa(DisasContext
*ctx
)
5314 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
5318 static void gen_esa(DisasContext
*ctx
)
5321 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
5325 static void gen_mfrom(DisasContext
*ctx
)
5327 #if defined(CONFIG_USER_ONLY)
5328 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5330 if (unlikely(!ctx
->mem_idx
)) {
5331 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5334 gen_helper_602_mfrom(cpu_gpr
[rD(ctx
->opcode
)], cpu_gpr
[rA(ctx
->opcode
)]);
5338 /* 602 - 603 - G2 TLB management */
5341 static void gen_tlbld_6xx(DisasContext
*ctx
)
5343 #if defined(CONFIG_USER_ONLY)
5344 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5346 if (unlikely(!ctx
->mem_idx
)) {
5347 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5350 gen_helper_6xx_tlbd(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)]);
5355 static void gen_tlbli_6xx(DisasContext
*ctx
)
5357 #if defined(CONFIG_USER_ONLY)
5358 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5360 if (unlikely(!ctx
->mem_idx
)) {
5361 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5364 gen_helper_6xx_tlbi(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)]);
5368 /* 74xx TLB management */
5371 static void gen_tlbld_74xx(DisasContext
*ctx
)
5373 #if defined(CONFIG_USER_ONLY)
5374 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5376 if (unlikely(!ctx
->mem_idx
)) {
5377 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5380 gen_helper_74xx_tlbd(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)]);
5385 static void gen_tlbli_74xx(DisasContext
*ctx
)
5387 #if defined(CONFIG_USER_ONLY)
5388 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5390 if (unlikely(!ctx
->mem_idx
)) {
5391 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5394 gen_helper_74xx_tlbi(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)]);
/* POWER instructions not in PowerPC 601 */

/* clf */
static void gen_clf(DisasContext *ctx)
{
    /* Cache line flush: implemented as no-op */
}
5407 static void gen_cli(DisasContext
*ctx
)
5409 /* Cache line invalidate: privileged and treated as no-op */
5410 #if defined(CONFIG_USER_ONLY)
5411 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5413 if (unlikely(!ctx
->mem_idx
)) {
5414 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5421 static void gen_dclst(DisasContext
*ctx
)
5423 /* Data cache line store: treated as no-op */
5426 static void gen_mfsri(DisasContext
*ctx
)
5428 #if defined(CONFIG_USER_ONLY)
5429 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5431 int ra
= rA(ctx
->opcode
);
5432 int rd
= rD(ctx
->opcode
);
5434 if (unlikely(!ctx
->mem_idx
)) {
5435 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5438 t0
= tcg_temp_new();
5439 gen_addr_reg_index(ctx
, t0
);
5440 tcg_gen_shri_tl(t0
, t0
, 28);
5441 tcg_gen_andi_tl(t0
, t0
, 0xF);
5442 gen_helper_load_sr(cpu_gpr
[rd
], cpu_env
, t0
);
5444 if (ra
!= 0 && ra
!= rd
)
5445 tcg_gen_mov_tl(cpu_gpr
[ra
], cpu_gpr
[rd
]);
5449 static void gen_rac(DisasContext
*ctx
)
5451 #if defined(CONFIG_USER_ONLY)
5452 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5455 if (unlikely(!ctx
->mem_idx
)) {
5456 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5459 t0
= tcg_temp_new();
5460 gen_addr_reg_index(ctx
, t0
);
5461 gen_helper_rac(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, t0
);
5466 static void gen_rfsvc(DisasContext
*ctx
)
5468 #if defined(CONFIG_USER_ONLY)
5469 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5471 if (unlikely(!ctx
->mem_idx
)) {
5472 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5475 gen_helper_rfsvc(cpu_env
);
5476 gen_sync_exception(ctx
);
5480 /* svc is not implemented for now */
5482 /* POWER2 specific instructions */
5483 /* Quad manipulation (load/store two floats at a time) */
5486 static void gen_lfq(DisasContext
*ctx
)
5488 int rd
= rD(ctx
->opcode
);
5490 gen_set_access_type(ctx
, ACCESS_FLOAT
);
5491 t0
= tcg_temp_new();
5492 gen_addr_imm_index(ctx
, t0
, 0);
5493 gen_qemu_ld64(ctx
, cpu_fpr
[rd
], t0
);
5494 gen_addr_add(ctx
, t0
, t0
, 8);
5495 gen_qemu_ld64(ctx
, cpu_fpr
[(rd
+ 1) % 32], t0
);
5500 static void gen_lfqu(DisasContext
*ctx
)
5502 int ra
= rA(ctx
->opcode
);
5503 int rd
= rD(ctx
->opcode
);
5505 gen_set_access_type(ctx
, ACCESS_FLOAT
);
5506 t0
= tcg_temp_new();
5507 t1
= tcg_temp_new();
5508 gen_addr_imm_index(ctx
, t0
, 0);
5509 gen_qemu_ld64(ctx
, cpu_fpr
[rd
], t0
);
5510 gen_addr_add(ctx
, t1
, t0
, 8);
5511 gen_qemu_ld64(ctx
, cpu_fpr
[(rd
+ 1) % 32], t1
);
5513 tcg_gen_mov_tl(cpu_gpr
[ra
], t0
);
5519 static void gen_lfqux(DisasContext
*ctx
)
5521 int ra
= rA(ctx
->opcode
);
5522 int rd
= rD(ctx
->opcode
);
5523 gen_set_access_type(ctx
, ACCESS_FLOAT
);
5525 t0
= tcg_temp_new();
5526 gen_addr_reg_index(ctx
, t0
);
5527 gen_qemu_ld64(ctx
, cpu_fpr
[rd
], t0
);
5528 t1
= tcg_temp_new();
5529 gen_addr_add(ctx
, t1
, t0
, 8);
5530 gen_qemu_ld64(ctx
, cpu_fpr
[(rd
+ 1) % 32], t1
);
5533 tcg_gen_mov_tl(cpu_gpr
[ra
], t0
);
5538 static void gen_lfqx(DisasContext
*ctx
)
5540 int rd
= rD(ctx
->opcode
);
5542 gen_set_access_type(ctx
, ACCESS_FLOAT
);
5543 t0
= tcg_temp_new();
5544 gen_addr_reg_index(ctx
, t0
);
5545 gen_qemu_ld64(ctx
, cpu_fpr
[rd
], t0
);
5546 gen_addr_add(ctx
, t0
, t0
, 8);
5547 gen_qemu_ld64(ctx
, cpu_fpr
[(rd
+ 1) % 32], t0
);
5552 static void gen_stfq(DisasContext
*ctx
)
5554 int rd
= rD(ctx
->opcode
);
5556 gen_set_access_type(ctx
, ACCESS_FLOAT
);
5557 t0
= tcg_temp_new();
5558 gen_addr_imm_index(ctx
, t0
, 0);
5559 gen_qemu_st64(ctx
, cpu_fpr
[rd
], t0
);
5560 gen_addr_add(ctx
, t0
, t0
, 8);
5561 gen_qemu_st64(ctx
, cpu_fpr
[(rd
+ 1) % 32], t0
);
5566 static void gen_stfqu(DisasContext
*ctx
)
5568 int ra
= rA(ctx
->opcode
);
5569 int rd
= rD(ctx
->opcode
);
5571 gen_set_access_type(ctx
, ACCESS_FLOAT
);
5572 t0
= tcg_temp_new();
5573 gen_addr_imm_index(ctx
, t0
, 0);
5574 gen_qemu_st64(ctx
, cpu_fpr
[rd
], t0
);
5575 t1
= tcg_temp_new();
5576 gen_addr_add(ctx
, t1
, t0
, 8);
5577 gen_qemu_st64(ctx
, cpu_fpr
[(rd
+ 1) % 32], t1
);
5580 tcg_gen_mov_tl(cpu_gpr
[ra
], t0
);
5585 static void gen_stfqux(DisasContext
*ctx
)
5587 int ra
= rA(ctx
->opcode
);
5588 int rd
= rD(ctx
->opcode
);
5590 gen_set_access_type(ctx
, ACCESS_FLOAT
);
5591 t0
= tcg_temp_new();
5592 gen_addr_reg_index(ctx
, t0
);
5593 gen_qemu_st64(ctx
, cpu_fpr
[rd
], t0
);
5594 t1
= tcg_temp_new();
5595 gen_addr_add(ctx
, t1
, t0
, 8);
5596 gen_qemu_st64(ctx
, cpu_fpr
[(rd
+ 1) % 32], t1
);
5599 tcg_gen_mov_tl(cpu_gpr
[ra
], t0
);
5604 static void gen_stfqx(DisasContext
*ctx
)
5606 int rd
= rD(ctx
->opcode
);
5608 gen_set_access_type(ctx
, ACCESS_FLOAT
);
5609 t0
= tcg_temp_new();
5610 gen_addr_reg_index(ctx
, t0
);
5611 gen_qemu_st64(ctx
, cpu_fpr
[rd
], t0
);
5612 gen_addr_add(ctx
, t0
, t0
, 8);
5613 gen_qemu_st64(ctx
, cpu_fpr
[(rd
+ 1) % 32], t0
);
5617 /* BookE specific instructions */
5619 /* XXX: not implemented on 440 ? */
5620 static void gen_mfapidi(DisasContext
*ctx
)
5623 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
5626 /* XXX: not implemented on 440 ? */
5627 static void gen_tlbiva(DisasContext
*ctx
)
5629 #if defined(CONFIG_USER_ONLY)
5630 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5633 if (unlikely(!ctx
->mem_idx
)) {
5634 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5637 t0
= tcg_temp_new();
5638 gen_addr_reg_index(ctx
, t0
);
5639 gen_helper_tlbie(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)]);
/* All 405 MAC instructions are translated here */
static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
                                        int ra, int rb, int rt, int Rc)
{
    TCGv t0, t1;

    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();

    switch (opc3 & 0x0D) {
    case 0x05:
        /* macchw    - macchw.    - macchwo   - macchwo.   */
        /* macchws   - macchws.   - macchwso  - macchwso.  */
        /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
        /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
        /* mulchw - mulchw. */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x04:
        /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
        /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
        /* mulchwu - mulchwu. */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x01:
        /* machhw    - machhw.    - machhwo   - machhwo.   */
        /* machhws   - machhws.   - machhwso  - machhwso.  */
        /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
        /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
        /* mulhhw - mulhhw. */
        tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16s_tl(t0, t0);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x00:
        /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
        /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
        /* mulhhwu - mulhhwu. */
        tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16u_tl(t0, t0);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x0D:
        /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
        /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
        /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
        /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
        /* mullhw - mullhw. */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
        break;
    case 0x0C:
        /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
        /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
        /* mullhwu - mullhwu. */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
        break;
    }
    if (opc2 & 0x04) {
        /* (n)multiply-and-accumulate (0x0C / 0x0E) */
        tcg_gen_mul_tl(t1, t0, t1);
        if (opc2 & 0x02) {
            /* nmultiply-and-accumulate (0x0E) */
            tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
        } else {
            /* multiply-and-accumulate (0x0C) */
            tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
        }

        if (opc3 & 0x12) {
            /* Check overflow and/or saturate */
            int l1 = gen_new_label();

            if (opc3 & 0x10) {
                /* Start with XER OV disabled, the most likely case */
                tcg_gen_movi_tl(cpu_ov, 0);
            }
            if (opc3 & 0x01) {
                /* Signed */
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
                tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
                tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
                    tcg_gen_xori_tl(t0, t0, 0x7fffffff);
                }
            } else {
                /* Unsigned */
                tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    tcg_gen_movi_tl(t0, UINT32_MAX);
                }
            }
            if (opc3 & 0x10) {
                /* Check overflow */
                tcg_gen_movi_tl(cpu_ov, 1);
                tcg_gen_movi_tl(cpu_so, 1);
            }
            gen_set_label(l1);
            tcg_gen_mov_tl(cpu_gpr[rt], t0);
        }
    } else {
        tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc) != 0) {
        /* Update Rc0 */
        gen_set_Rc0(ctx, cpu_gpr[rt]);
    }
}
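/*
 * Rough decode map for gen_405_mulladd_insn above, as used by the
 * GEN_MAC_HANDLER list below (derived from the opc2/opc3 values it is
 * called with):
 *   opc3 & 0x0D selects which 16-bit halves of rA/rB are multiplied and
 *   whether they are sign- or zero-extended;
 *   opc2 & 0x04 selects accumulate (mac/nmac, opc2 0x0C/0x0E) versus a
 *   plain mul* (opc2 0x08);
 *   opc2 & 0x02 negates the product before accumulating;
 *   opc3 & 0x02 saturates and opc3 & 0x10 updates XER[OV]/[SO].
 */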
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}
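/*
 * For illustration, GEN_MAC_HANDLER(macchw, 0x0C, 0x05) below expands to:
 *
 *     static void gen_macchw(DisasContext *ctx)
 *     {
 *         gen_405_mulladd_insn(ctx, 0x0C, 0x05, rA(ctx->opcode),
 *                              rB(ctx->opcode), rD(ctx->opcode),
 *                              Rc(ctx->opcode));
 *     }
 */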
5773 /* macchw - macchw. */
5774 GEN_MAC_HANDLER(macchw
, 0x0C, 0x05);
5775 /* macchwo - macchwo. */
5776 GEN_MAC_HANDLER(macchwo
, 0x0C, 0x15);
5777 /* macchws - macchws. */
5778 GEN_MAC_HANDLER(macchws
, 0x0C, 0x07);
5779 /* macchwso - macchwso. */
5780 GEN_MAC_HANDLER(macchwso
, 0x0C, 0x17);
5781 /* macchwsu - macchwsu. */
5782 GEN_MAC_HANDLER(macchwsu
, 0x0C, 0x06);
5783 /* macchwsuo - macchwsuo. */
5784 GEN_MAC_HANDLER(macchwsuo
, 0x0C, 0x16);
5785 /* macchwu - macchwu. */
5786 GEN_MAC_HANDLER(macchwu
, 0x0C, 0x04);
5787 /* macchwuo - macchwuo. */
5788 GEN_MAC_HANDLER(macchwuo
, 0x0C, 0x14);
5789 /* machhw - machhw. */
5790 GEN_MAC_HANDLER(machhw
, 0x0C, 0x01);
5791 /* machhwo - machhwo. */
5792 GEN_MAC_HANDLER(machhwo
, 0x0C, 0x11);
5793 /* machhws - machhws. */
5794 GEN_MAC_HANDLER(machhws
, 0x0C, 0x03);
5795 /* machhwso - machhwso. */
5796 GEN_MAC_HANDLER(machhwso
, 0x0C, 0x13);
5797 /* machhwsu - machhwsu. */
5798 GEN_MAC_HANDLER(machhwsu
, 0x0C, 0x02);
5799 /* machhwsuo - machhwsuo. */
5800 GEN_MAC_HANDLER(machhwsuo
, 0x0C, 0x12);
5801 /* machhwu - machhwu. */
5802 GEN_MAC_HANDLER(machhwu
, 0x0C, 0x00);
5803 /* machhwuo - machhwuo. */
5804 GEN_MAC_HANDLER(machhwuo
, 0x0C, 0x10);
5805 /* maclhw - maclhw. */
5806 GEN_MAC_HANDLER(maclhw
, 0x0C, 0x0D);
5807 /* maclhwo - maclhwo. */
5808 GEN_MAC_HANDLER(maclhwo
, 0x0C, 0x1D);
5809 /* maclhws - maclhws. */
5810 GEN_MAC_HANDLER(maclhws
, 0x0C, 0x0F);
5811 /* maclhwso - maclhwso. */
5812 GEN_MAC_HANDLER(maclhwso
, 0x0C, 0x1F);
5813 /* maclhwu - maclhwu. */
5814 GEN_MAC_HANDLER(maclhwu
, 0x0C, 0x0C);
5815 /* maclhwuo - maclhwuo. */
5816 GEN_MAC_HANDLER(maclhwuo
, 0x0C, 0x1C);
5817 /* maclhwsu - maclhwsu. */
5818 GEN_MAC_HANDLER(maclhwsu
, 0x0C, 0x0E);
5819 /* maclhwsuo - maclhwsuo. */
5820 GEN_MAC_HANDLER(maclhwsuo
, 0x0C, 0x1E);
5821 /* nmacchw - nmacchw. */
5822 GEN_MAC_HANDLER(nmacchw
, 0x0E, 0x05);
5823 /* nmacchwo - nmacchwo. */
5824 GEN_MAC_HANDLER(nmacchwo
, 0x0E, 0x15);
5825 /* nmacchws - nmacchws. */
5826 GEN_MAC_HANDLER(nmacchws
, 0x0E, 0x07);
5827 /* nmacchwso - nmacchwso. */
5828 GEN_MAC_HANDLER(nmacchwso
, 0x0E, 0x17);
5829 /* nmachhw - nmachhw. */
5830 GEN_MAC_HANDLER(nmachhw
, 0x0E, 0x01);
5831 /* nmachhwo - nmachhwo. */
5832 GEN_MAC_HANDLER(nmachhwo
, 0x0E, 0x11);
5833 /* nmachhws - nmachhws. */
5834 GEN_MAC_HANDLER(nmachhws
, 0x0E, 0x03);
5835 /* nmachhwso - nmachhwso. */
5836 GEN_MAC_HANDLER(nmachhwso
, 0x0E, 0x13);
5837 /* nmaclhw - nmaclhw. */
5838 GEN_MAC_HANDLER(nmaclhw
, 0x0E, 0x0D);
5839 /* nmaclhwo - nmaclhwo. */
5840 GEN_MAC_HANDLER(nmaclhwo
, 0x0E, 0x1D);
5841 /* nmaclhws - nmaclhws. */
5842 GEN_MAC_HANDLER(nmaclhws
, 0x0E, 0x0F);
5843 /* nmaclhwso - nmaclhwso. */
5844 GEN_MAC_HANDLER(nmaclhwso
, 0x0E, 0x1F);
5846 /* mulchw - mulchw. */
5847 GEN_MAC_HANDLER(mulchw
, 0x08, 0x05);
5848 /* mulchwu - mulchwu. */
5849 GEN_MAC_HANDLER(mulchwu
, 0x08, 0x04);
5850 /* mulhhw - mulhhw. */
5851 GEN_MAC_HANDLER(mulhhw
, 0x08, 0x01);
5852 /* mulhhwu - mulhhwu. */
5853 GEN_MAC_HANDLER(mulhhwu
, 0x08, 0x00);
5854 /* mullhw - mullhw. */
5855 GEN_MAC_HANDLER(mullhw
, 0x08, 0x0D);
5856 /* mullhwu - mullhwu. */
5857 GEN_MAC_HANDLER(mullhwu
, 0x08, 0x0C);
5860 static void gen_mfdcr(DisasContext
*ctx
)
5862 #if defined(CONFIG_USER_ONLY)
5863 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
5866 if (unlikely(!ctx
->mem_idx
)) {
5867 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
5870 /* NIP cannot be restored if the memory exception comes from an helper */
5871 gen_update_nip(ctx
, ctx
->nip
- 4);
5872 dcrn
= tcg_const_tl(SPR(ctx
->opcode
));
5873 gen_helper_load_dcr(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, dcrn
);
5874 tcg_temp_free(dcrn
);
5879 static void gen_mtdcr(DisasContext
*ctx
)
5881 #if defined(CONFIG_USER_ONLY)
5882 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
5885 if (unlikely(!ctx
->mem_idx
)) {
5886 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
5889 /* NIP cannot be restored if the memory exception comes from an helper */
5890 gen_update_nip(ctx
, ctx
->nip
- 4);
5891 dcrn
= tcg_const_tl(SPR(ctx
->opcode
));
5892 gen_helper_store_dcr(cpu_env
, dcrn
, cpu_gpr
[rS(ctx
->opcode
)]);
5893 tcg_temp_free(dcrn
);
5898 /* XXX: not implemented on 440 ? */
5899 static void gen_mfdcrx(DisasContext
*ctx
)
5901 #if defined(CONFIG_USER_ONLY)
5902 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
5904 if (unlikely(!ctx
->mem_idx
)) {
5905 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
5908 /* NIP cannot be restored if the memory exception comes from an helper */
5909 gen_update_nip(ctx
, ctx
->nip
- 4);
5910 gen_helper_load_dcr(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
,
5911 cpu_gpr
[rA(ctx
->opcode
)]);
5912 /* Note: Rc update flag set leads to undefined state of Rc0 */
5917 /* XXX: not implemented on 440 ? */
5918 static void gen_mtdcrx(DisasContext
*ctx
)
5920 #if defined(CONFIG_USER_ONLY)
5921 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
5923 if (unlikely(!ctx
->mem_idx
)) {
5924 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_REG
);
5927 /* NIP cannot be restored if the memory exception comes from an helper */
5928 gen_update_nip(ctx
, ctx
->nip
- 4);
5929 gen_helper_store_dcr(cpu_env
, cpu_gpr
[rA(ctx
->opcode
)],
5930 cpu_gpr
[rS(ctx
->opcode
)]);
5931 /* Note: Rc update flag set leads to undefined state of Rc0 */
/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    /* NIP cannot be restored if the memory exception comes from a helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}
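/*
 * Note on the DCR accessors above: all of them go through helpers, so NIP is
 * synced first (gen_update_nip) to make sure a faulting DCR access reports
 * the address of the mfdcr/mtdcr-family instruction itself.
 */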
5956 static void gen_dccci(DisasContext
*ctx
)
5958 #if defined(CONFIG_USER_ONLY)
5959 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5961 if (unlikely(!ctx
->mem_idx
)) {
5962 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5965 /* interpreted as no-op */
5970 static void gen_dcread(DisasContext
*ctx
)
5972 #if defined(CONFIG_USER_ONLY)
5973 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5976 if (unlikely(!ctx
->mem_idx
)) {
5977 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
5980 gen_set_access_type(ctx
, ACCESS_CACHE
);
5981 EA
= tcg_temp_new();
5982 gen_addr_reg_index(ctx
, EA
);
5983 val
= tcg_temp_new();
5984 gen_qemu_ld32u(ctx
, val
, EA
);
5986 tcg_gen_mov_tl(cpu_gpr
[rD(ctx
->opcode
)], EA
);
5992 static void gen_icbt_40x(DisasContext
*ctx
)
5994 /* interpreted as no-op */
5995 /* XXX: specification say this is treated as a load by the MMU
5996 * but does not generate any exception
6001 static void gen_iccci(DisasContext
*ctx
)
6003 #if defined(CONFIG_USER_ONLY)
6004 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6006 if (unlikely(!ctx
->mem_idx
)) {
6007 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6010 /* interpreted as no-op */
6015 static void gen_icread(DisasContext
*ctx
)
6017 #if defined(CONFIG_USER_ONLY)
6018 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6020 if (unlikely(!ctx
->mem_idx
)) {
6021 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6024 /* interpreted as no-op */
6028 /* rfci (mem_idx only) */
6029 static void gen_rfci_40x(DisasContext
*ctx
)
6031 #if defined(CONFIG_USER_ONLY)
6032 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6034 if (unlikely(!ctx
->mem_idx
)) {
6035 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6038 /* Restore CPU state */
6039 gen_helper_40x_rfci(cpu_env
);
6040 gen_sync_exception(ctx
);
6044 static void gen_rfci(DisasContext
*ctx
)
6046 #if defined(CONFIG_USER_ONLY)
6047 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6049 if (unlikely(!ctx
->mem_idx
)) {
6050 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6053 /* Restore CPU state */
6054 gen_helper_rfci(cpu_env
);
6055 gen_sync_exception(ctx
);
6059 /* BookE specific */
6061 /* XXX: not implemented on 440 ? */
6062 static void gen_rfdi(DisasContext
*ctx
)
6064 #if defined(CONFIG_USER_ONLY)
6065 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6067 if (unlikely(!ctx
->mem_idx
)) {
6068 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6071 /* Restore CPU state */
6072 gen_helper_rfdi(cpu_env
);
6073 gen_sync_exception(ctx
);
6077 /* XXX: not implemented on 440 ? */
6078 static void gen_rfmci(DisasContext
*ctx
)
6080 #if defined(CONFIG_USER_ONLY)
6081 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6083 if (unlikely(!ctx
->mem_idx
)) {
6084 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6087 /* Restore CPU state */
6088 gen_helper_rfmci(cpu_env
);
6089 gen_sync_exception(ctx
);
6093 /* TLB management - PowerPC 405 implementation */
6096 static void gen_tlbre_40x(DisasContext
*ctx
)
6098 #if defined(CONFIG_USER_ONLY)
6099 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6101 if (unlikely(!ctx
->mem_idx
)) {
6102 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6105 switch (rB(ctx
->opcode
)) {
6107 gen_helper_4xx_tlbre_hi(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
,
6108 cpu_gpr
[rA(ctx
->opcode
)]);
6111 gen_helper_4xx_tlbre_lo(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
,
6112 cpu_gpr
[rA(ctx
->opcode
)]);
6115 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
6121 /* tlbsx - tlbsx. */
6122 static void gen_tlbsx_40x(DisasContext
*ctx
)
6124 #if defined(CONFIG_USER_ONLY)
6125 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6128 if (unlikely(!ctx
->mem_idx
)) {
6129 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6132 t0
= tcg_temp_new();
6133 gen_addr_reg_index(ctx
, t0
);
6134 gen_helper_4xx_tlbsx(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, t0
);
6136 if (Rc(ctx
->opcode
)) {
6137 int l1
= gen_new_label();
6138 tcg_gen_trunc_tl_i32(cpu_crf
[0], cpu_so
);
6139 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[rD(ctx
->opcode
)], -1, l1
);
6140 tcg_gen_ori_i32(cpu_crf
[0], cpu_crf
[0], 0x02);
6147 static void gen_tlbwe_40x(DisasContext
*ctx
)
6149 #if defined(CONFIG_USER_ONLY)
6150 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6152 if (unlikely(!ctx
->mem_idx
)) {
6153 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6156 switch (rB(ctx
->opcode
)) {
6158 gen_helper_4xx_tlbwe_hi(cpu_env
, cpu_gpr
[rA(ctx
->opcode
)],
6159 cpu_gpr
[rS(ctx
->opcode
)]);
6162 gen_helper_4xx_tlbwe_lo(cpu_env
, cpu_gpr
[rA(ctx
->opcode
)],
6163 cpu_gpr
[rS(ctx
->opcode
)]);
6166 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
6172 /* TLB management - PowerPC 440 implementation */
6175 static void gen_tlbre_440(DisasContext
*ctx
)
6177 #if defined(CONFIG_USER_ONLY)
6178 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6180 if (unlikely(!ctx
->mem_idx
)) {
6181 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6184 switch (rB(ctx
->opcode
)) {
6189 TCGv_i32 t0
= tcg_const_i32(rB(ctx
->opcode
));
6190 gen_helper_440_tlbre(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
,
6191 t0
, cpu_gpr
[rA(ctx
->opcode
)]);
6192 tcg_temp_free_i32(t0
);
6196 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
6202 /* tlbsx - tlbsx. */
6203 static void gen_tlbsx_440(DisasContext
*ctx
)
6205 #if defined(CONFIG_USER_ONLY)
6206 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6209 if (unlikely(!ctx
->mem_idx
)) {
6210 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6213 t0
= tcg_temp_new();
6214 gen_addr_reg_index(ctx
, t0
);
6215 gen_helper_440_tlbsx(cpu_gpr
[rD(ctx
->opcode
)], cpu_env
, t0
);
6217 if (Rc(ctx
->opcode
)) {
6218 int l1
= gen_new_label();
6219 tcg_gen_trunc_tl_i32(cpu_crf
[0], cpu_so
);
6220 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[rD(ctx
->opcode
)], -1, l1
);
6221 tcg_gen_ori_i32(cpu_crf
[0], cpu_crf
[0], 0x02);
6228 static void gen_tlbwe_440(DisasContext
*ctx
)
6230 #if defined(CONFIG_USER_ONLY)
6231 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6233 if (unlikely(!ctx
->mem_idx
)) {
6234 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6237 switch (rB(ctx
->opcode
)) {
6242 TCGv_i32 t0
= tcg_const_i32(rB(ctx
->opcode
));
6243 gen_helper_440_tlbwe(cpu_env
, t0
, cpu_gpr
[rA(ctx
->opcode
)],
6244 cpu_gpr
[rS(ctx
->opcode
)]);
6245 tcg_temp_free_i32(t0
);
6249 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
6255 /* TLB management - PowerPC BookE 2.06 implementation */
6258 static void gen_tlbre_booke206(DisasContext
*ctx
)
6260 #if defined(CONFIG_USER_ONLY)
6261 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6263 if (unlikely(!ctx
->mem_idx
)) {
6264 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6268 gen_helper_booke206_tlbre(cpu_env
);
6272 /* tlbsx - tlbsx. */
6273 static void gen_tlbsx_booke206(DisasContext
*ctx
)
6275 #if defined(CONFIG_USER_ONLY)
6276 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6279 if (unlikely(!ctx
->mem_idx
)) {
6280 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6284 if (rA(ctx
->opcode
)) {
6285 t0
= tcg_temp_new();
6286 tcg_gen_mov_tl(t0
, cpu_gpr
[rD(ctx
->opcode
)]);
6288 t0
= tcg_const_tl(0);
6291 tcg_gen_add_tl(t0
, t0
, cpu_gpr
[rB(ctx
->opcode
)]);
6292 gen_helper_booke206_tlbsx(cpu_env
, t0
);
6297 static void gen_tlbwe_booke206(DisasContext
*ctx
)
6299 #if defined(CONFIG_USER_ONLY)
6300 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6302 if (unlikely(!ctx
->mem_idx
)) {
6303 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6306 gen_update_nip(ctx
, ctx
->nip
- 4);
6307 gen_helper_booke206_tlbwe(cpu_env
);
6311 static void gen_tlbivax_booke206(DisasContext
*ctx
)
6313 #if defined(CONFIG_USER_ONLY)
6314 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6317 if (unlikely(!ctx
->mem_idx
)) {
6318 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6322 t0
= tcg_temp_new();
6323 gen_addr_reg_index(ctx
, t0
);
6325 gen_helper_booke206_tlbivax(cpu_env
, t0
);
6329 static void gen_tlbilx_booke206(DisasContext
*ctx
)
6331 #if defined(CONFIG_USER_ONLY)
6332 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6335 if (unlikely(!ctx
->mem_idx
)) {
6336 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6340 t0
= tcg_temp_new();
6341 gen_addr_reg_index(ctx
, t0
);
6343 switch((ctx
->opcode
>> 21) & 0x3) {
6345 gen_helper_booke206_tlbilx0(cpu_env
, t0
);
6348 gen_helper_booke206_tlbilx1(cpu_env
, t0
);
6351 gen_helper_booke206_tlbilx3(cpu_env
, t0
);
6354 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
/* wrtee */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    TCGv t0;
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /* Stop translation to have a chance to raise an exception
     * if we just set msr_ee to 1
     */
    gen_stop_exception(ctx);
#endif
}

/* wrteei */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
#else
    if (unlikely(!ctx->mem_idx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_OPC);
        return;
    }
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        gen_stop_exception(ctx);
    } else {
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif
}
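/*
 * Note on wrtee/wrteei above: both only touch MSR[EE].  wrtee copies the EE
 * bit from rD, while wrteei takes it from bit 0x00008000 of the opcode.
 * Whenever EE may have been set to 1, translation stops so that a pending
 * interrupt can be taken before the next instruction.
 */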
6406 /* PowerPC 440 specific instructions */
6409 static void gen_dlmzb(DisasContext
*ctx
)
6411 TCGv_i32 t0
= tcg_const_i32(Rc(ctx
->opcode
));
6412 gen_helper_dlmzb(cpu_gpr
[rA(ctx
->opcode
)], cpu_env
,
6413 cpu_gpr
[rS(ctx
->opcode
)], cpu_gpr
[rB(ctx
->opcode
)], t0
);
6414 tcg_temp_free_i32(t0
);
6417 /* mbar replaces eieio on 440 */
6418 static void gen_mbar(DisasContext
*ctx
)
6420 /* interpreted as no-op */
6423 /* msync replaces sync on 440 */
6424 static void gen_msync_4xx(DisasContext
*ctx
)
6426 /* interpreted as no-op */
6430 static void gen_icbt_440(DisasContext
*ctx
)
6432 /* interpreted as no-op */
6433 /* XXX: specification say this is treated as a load by the MMU
6434 * but does not generate any exception
6438 /* Embedded.Processor Control */
6440 static void gen_msgclr(DisasContext
*ctx
)
6442 #if defined(CONFIG_USER_ONLY)
6443 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6445 if (unlikely(ctx
->mem_idx
== 0)) {
6446 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6450 gen_helper_msgclr(cpu_env
, cpu_gpr
[rB(ctx
->opcode
)]);
6454 static void gen_msgsnd(DisasContext
*ctx
)
6456 #if defined(CONFIG_USER_ONLY)
6457 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6459 if (unlikely(ctx
->mem_idx
== 0)) {
6460 gen_inval_exception(ctx
, POWERPC_EXCP_PRIV_OPC
);
6464 gen_helper_msgsnd(cpu_gpr
[rB(ctx
->opcode
)]);
/*** Altivec vector extension ***/
/* Altivec registers moves */

static inline TCGv_ptr gen_avr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, avr[reg]));
    return r;
}
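/*
 * gen_avr_ptr() hands helpers a pointer to env->avr[reg].  A typical use in
 * the macros below, shown here only as a sketch:
 *
 *     TCGv_ptr rs = gen_avr_ptr(rS(ctx->opcode));
 *     gen_helper_lvebx(cpu_env, rs, EA);   // helper sees the full 128-bit AVR
 *     tcg_temp_free_ptr(rs);               // caller frees the pointer temp
 */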
#define GEN_VR_LDX(name, opc2, opc3)                                          \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        gen_exception(ctx, POWERPC_EXCP_VPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    tcg_gen_andi_tl(EA, EA, ~0xf);                                            \
    if (ctx->le_mode) {                                                       \
        gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA);                    \
        tcg_gen_addi_tl(EA, EA, 8);                                           \
        gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA);                    \
    } else {                                                                  \
        gen_qemu_ld64(ctx, cpu_avrh[rD(ctx->opcode)], EA);                    \
        tcg_gen_addi_tl(EA, EA, 8);                                           \
        gen_qemu_ld64(ctx, cpu_avrl[rD(ctx->opcode)], EA);                    \
    }                                                                         \
    tcg_temp_free(EA);                                                        \
}
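/*
 * Note on GEN_VR_LDX above: the EA is masked with ~0xf because lvx/lvxl
 * always access an aligned quadword, and the two 64-bit halves are loaded
 * into cpu_avrh/cpu_avrl in an order that depends on ctx->le_mode so the
 * register ends up in the same representation either way.
 */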
6502 #define GEN_VR_STX(name, opc2, opc3) \
6503 static void gen_st##name(DisasContext *ctx) \
6506 if (unlikely(!ctx->altivec_enabled)) { \
6507 gen_exception(ctx, POWERPC_EXCP_VPU); \
6510 gen_set_access_type(ctx, ACCESS_INT); \
6511 EA = tcg_temp_new(); \
6512 gen_addr_reg_index(ctx, EA); \
6513 tcg_gen_andi_tl(EA, EA, ~0xf); \
6514 if (ctx->le_mode) { \
6515 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6516 tcg_gen_addi_tl(EA, EA, 8); \
6517 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6519 gen_qemu_st64(ctx, cpu_avrh[rD(ctx->opcode)], EA); \
6520 tcg_gen_addi_tl(EA, EA, 8); \
6521 gen_qemu_st64(ctx, cpu_avrl[rD(ctx->opcode)], EA); \
6523 tcg_temp_free(EA); \
6526 #define GEN_VR_LVE(name, opc2, opc3) \
6527 static void gen_lve##name(DisasContext *ctx) \
6531 if (unlikely(!ctx->altivec_enabled)) { \
6532 gen_exception(ctx, POWERPC_EXCP_VPU); \
6535 gen_set_access_type(ctx, ACCESS_INT); \
6536 EA = tcg_temp_new(); \
6537 gen_addr_reg_index(ctx, EA); \
6538 rs = gen_avr_ptr(rS(ctx->opcode)); \
6539 gen_helper_lve##name(cpu_env, rs, EA); \
6540 tcg_temp_free(EA); \
6541 tcg_temp_free_ptr(rs); \
6544 #define GEN_VR_STVE(name, opc2, opc3) \
6545 static void gen_stve##name(DisasContext *ctx) \
6549 if (unlikely(!ctx->altivec_enabled)) { \
6550 gen_exception(ctx, POWERPC_EXCP_VPU); \
6553 gen_set_access_type(ctx, ACCESS_INT); \
6554 EA = tcg_temp_new(); \
6555 gen_addr_reg_index(ctx, EA); \
6556 rs = gen_avr_ptr(rS(ctx->opcode)); \
6557 gen_helper_stve##name(cpu_env, rs, EA); \
6558 tcg_temp_free(EA); \
6559 tcg_temp_free_ptr(rs); \
GEN_VR_LDX(lvx, 0x07, 0x03);
/* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
GEN_VR_LDX(lvxl, 0x07, 0x0B);

GEN_VR_LVE(bx, 0x07, 0x00);
GEN_VR_LVE(hx, 0x07, 0x01);
GEN_VR_LVE(wx, 0x07, 0x02);

GEN_VR_STX(svx, 0x07, 0x07);
/* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
GEN_VR_STX(svxl, 0x07, 0x0F);

GEN_VR_STVE(bx, 0x07, 0x04);
GEN_VR_STVE(hx, 0x07, 0x05);
GEN_VR_STVE(wx, 0x07, 0x06);
static void gen_lvsl(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsl(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}
static void gen_lvsr(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsr(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}
static void gen_mfvscr(DisasContext *ctx)
{
    TCGv_i32 t;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    tcg_gen_movi_i64(cpu_avrh[rD(ctx->opcode)], 0);
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, vscr));
    tcg_gen_extu_i32_i64(cpu_avrl[rD(ctx->opcode)], t);
    tcg_temp_free_i32(t);
}
static void gen_mtvscr(DisasContext *ctx)
{
    TCGv_ptr p;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    p = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_mtvscr(cpu_env, p);
    tcg_temp_free_ptr(p);
}
/* Logical operations */
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    tcg_op(cpu_avrh[rD(ctx->opcode)], cpu_avrh[rA(ctx->opcode)], cpu_avrh[rB(ctx->opcode)]); \
    tcg_op(cpu_avrl[rD(ctx->opcode)], cpu_avrl[rA(ctx->opcode)], cpu_avrl[rB(ctx->opcode)]); \
}

GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
#define GEN_VXFORM(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, ra, rb); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_ENV(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(cpu_env, rd, ra, rb); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}
GEN_VXFORM(vaddubm, 0, 0);
GEN_VXFORM(vadduhm, 0, 1);
GEN_VXFORM(vadduwm, 0, 2);
GEN_VXFORM(vsububm, 0, 16);
GEN_VXFORM(vsubuhm, 0, 17);
GEN_VXFORM(vsubuwm, 0, 18);
GEN_VXFORM(vmaxub, 1, 0);
GEN_VXFORM(vmaxuh, 1, 1);
GEN_VXFORM(vmaxuw, 1, 2);
GEN_VXFORM(vmaxsb, 1, 4);
GEN_VXFORM(vmaxsh, 1, 5);
GEN_VXFORM(vmaxsw, 1, 6);
GEN_VXFORM(vminub, 1, 8);
GEN_VXFORM(vminuh, 1, 9);
GEN_VXFORM(vminuw, 1, 10);
GEN_VXFORM(vminsb, 1, 12);
GEN_VXFORM(vminsh, 1, 13);
GEN_VXFORM(vminsw, 1, 14);
GEN_VXFORM(vavgub, 1, 16);
GEN_VXFORM(vavguh, 1, 17);
GEN_VXFORM(vavguw, 1, 18);
GEN_VXFORM(vavgsb, 1, 20);
GEN_VXFORM(vavgsh, 1, 21);
GEN_VXFORM(vavgsw, 1, 22);
GEN_VXFORM(vmrghb, 6, 0);
GEN_VXFORM(vmrghh, 6, 1);
GEN_VXFORM(vmrghw, 6, 2);
GEN_VXFORM(vmrglb, 6, 4);
GEN_VXFORM(vmrglh, 6, 5);
GEN_VXFORM(vmrglw, 6, 6);
GEN_VXFORM(vmuloub, 4, 0);
GEN_VXFORM(vmulouh, 4, 1);
GEN_VXFORM(vmulosb, 4, 4);
GEN_VXFORM(vmulosh, 4, 5);
GEN_VXFORM(vmuleub, 4, 8);
GEN_VXFORM(vmuleuh, 4, 9);
GEN_VXFORM(vmulesb, 4, 12);
GEN_VXFORM(vmulesh, 4, 13);
GEN_VXFORM(vslb, 2, 4);
GEN_VXFORM(vslh, 2, 5);
GEN_VXFORM(vslw, 2, 6);
GEN_VXFORM(vsrb, 2, 8);
GEN_VXFORM(vsrh, 2, 9);
GEN_VXFORM(vsrw, 2, 10);
GEN_VXFORM(vsrab, 2, 12);
GEN_VXFORM(vsrah, 2, 13);
GEN_VXFORM(vsraw, 2, 14);
GEN_VXFORM(vslo, 6, 16);
GEN_VXFORM(vsro, 6, 17);
GEN_VXFORM(vaddcuw, 0, 6);
GEN_VXFORM(vsubcuw, 0, 22);
GEN_VXFORM_ENV(vaddubs, 0, 8);
GEN_VXFORM_ENV(vadduhs, 0, 9);
GEN_VXFORM_ENV(vadduws, 0, 10);
GEN_VXFORM_ENV(vaddsbs, 0, 12);
GEN_VXFORM_ENV(vaddshs, 0, 13);
GEN_VXFORM_ENV(vaddsws, 0, 14);
GEN_VXFORM_ENV(vsububs, 0, 24);
GEN_VXFORM_ENV(vsubuhs, 0, 25);
GEN_VXFORM_ENV(vsubuws, 0, 26);
GEN_VXFORM_ENV(vsubsbs, 0, 28);
GEN_VXFORM_ENV(vsubshs, 0, 29);
GEN_VXFORM_ENV(vsubsws, 0, 30);
GEN_VXFORM(vrlb, 2, 0);
GEN_VXFORM(vrlh, 2, 1);
GEN_VXFORM(vrlw, 2, 2);
GEN_VXFORM(vsl, 2, 7);
GEN_VXFORM(vsr, 2, 11);
GEN_VXFORM_ENV(vpkuhum, 7, 0);
GEN_VXFORM_ENV(vpkuwum, 7, 1);
GEN_VXFORM_ENV(vpkuhus, 7, 2);
GEN_VXFORM_ENV(vpkuwus, 7, 3);
GEN_VXFORM_ENV(vpkshus, 7, 4);
GEN_VXFORM_ENV(vpkswus, 7, 5);
GEN_VXFORM_ENV(vpkshss, 7, 6);
GEN_VXFORM_ENV(vpkswss, 7, 7);
GEN_VXFORM(vpkpx, 7, 12);
GEN_VXFORM_ENV(vsum4ubs, 4, 24);
GEN_VXFORM_ENV(vsum4sbs, 4, 28);
GEN_VXFORM_ENV(vsum4shs, 4, 25);
GEN_VXFORM_ENV(vsum2sws, 4, 26);
GEN_VXFORM_ENV(vsumsws, 4, 30);
GEN_VXFORM_ENV(vaddfp, 5, 0);
GEN_VXFORM_ENV(vsubfp, 5, 1);
GEN_VXFORM_ENV(vmaxfp, 5, 16);
GEN_VXFORM_ENV(vminfp, 5, 17);
#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##opname(cpu_env, rd, ra, rb); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXRFORM(name, opc2, opc3) \
    GEN_VXRFORM1(name, name, #name, opc2, opc3) \
    GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
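/* Each VXR compare is generated twice: the plain form and a "dot" form
 * (opc3 bit 4 set, i.e. the record bit) whose helper is also expected to
 * update CR6 with the all/none summary of the comparison.
 */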
GEN_VXRFORM(vcmpequb, 3, 0)
GEN_VXRFORM(vcmpequh, 3, 1)
GEN_VXRFORM(vcmpequw, 3, 2)
GEN_VXRFORM(vcmpgtsb, 3, 12)
GEN_VXRFORM(vcmpgtsh, 3, 13)
GEN_VXRFORM(vcmpgtsw, 3, 14)
GEN_VXRFORM(vcmpgtub, 3, 8)
GEN_VXRFORM(vcmpgtuh, 3, 9)
GEN_VXRFORM(vcmpgtuw, 3, 10)
GEN_VXRFORM(vcmpeqfp, 3, 3)
GEN_VXRFORM(vcmpgefp, 3, 7)
GEN_VXRFORM(vcmpgtfp, 3, 11)
GEN_VXRFORM(vcmpbfp, 3, 15)
#define GEN_VXFORM_SIMM(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rd; \
    TCGv_i32 simm; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    simm = tcg_const_i32(SIMM5(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, simm); \
    tcg_temp_free_i32(simm); \
    tcg_temp_free_ptr(rd); \
}

GEN_VXFORM_SIMM(vspltisb, 6, 12);
GEN_VXFORM_SIMM(vspltish, 6, 13);
GEN_VXFORM_SIMM(vspltisw, 6, 14);
#define GEN_VXFORM_NOA(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, rb); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_NOA_ENV(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(cpu_env, rd, rb); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

GEN_VXFORM_NOA(vupkhsb, 7, 8);
GEN_VXFORM_NOA(vupkhsh, 7, 9);
GEN_VXFORM_NOA(vupklsb, 7, 10);
GEN_VXFORM_NOA(vupklsh, 7, 11);
GEN_VXFORM_NOA(vupkhpx, 7, 13);
GEN_VXFORM_NOA(vupklpx, 7, 15);
GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
GEN_VXFORM_NOA_ENV(vrfim, 5, 8);
GEN_VXFORM_NOA_ENV(vrfin, 5, 9);
GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
GEN_VXFORM_NOA_ENV(vrfiz, 5, 11);
#define GEN_VXFORM_SIMM(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rd; \
    TCGv_i32 simm; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    simm = tcg_const_i32(SIMM5(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, simm); \
    tcg_temp_free_i32(simm); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_UIMM(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
    TCGv_i32 uimm; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, rb, uimm); \
    tcg_temp_free_i32(uimm); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_UIMM_ENV(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
    TCGv_i32 uimm; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(cpu_env, rd, rb, uimm); \
    tcg_temp_free_i32(uimm); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

GEN_VXFORM_UIMM(vspltb, 6, 8);
GEN_VXFORM_UIMM(vsplth, 6, 9);
GEN_VXFORM_UIMM(vspltw, 6, 10);
GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
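/* The UIMM5 field is an element index for vsplt{b,h,w}, while for the
 * conversions (vcfux/vcfsx/vctuxs/vctsxs) the helpers use it as a
 * power-of-two scale factor, per the AltiVec definition of those ops.
 */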
static void gen_vsldoi(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rd;
    TCGv_i32 sh;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    sh = tcg_const_i32(VSH(ctx->opcode));
    gen_helper_vsldoi(rd, ra, rb, sh);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rd);
    tcg_temp_free_i32(sh);
}
#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rc, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rc = gen_avr_ptr(rC(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    if (Rc(ctx->opcode)) { \
        gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
    } else { \
        gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
    } \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rc); \
    tcg_temp_free_ptr(rd); \
}

GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
static void gen_vmladduhm(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rc, rd;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rc = gen_avr_ptr(rC(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_vmladduhm(rd, ra, rb, rc);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rc);
    tcg_temp_free_ptr(rd);
}

GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
GEN_VAFORM_PAIRED(vsel, vperm, 21)
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
/*** VSX extension ***/

static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}
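/* VSX registers 0..31 are split across the FPRs (high doubleword) and the
 * cpu_vsr array (low doubleword); VSX registers 32..63 alias the Altivec
 * registers, hence the n-32 indexing above.
 */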
static void gen_lxsdx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    /* NOTE: cpu_vsrl is undefined */
    tcg_temp_free(EA);
}
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 tmp;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    tmp = tcg_temp_new_i64();

    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xth, EA);
    tcg_gen_deposit_i64(xth, xth, tmp, 32, 32);

    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xtl, EA);
    tcg_gen_deposit_i64(xtl, xtl, tmp, 32, 32);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}
static void gen_stxsdx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}
static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 tmp;
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tmp = tcg_temp_new_i64();

    tcg_gen_shri_i64(tmp, cpu_vsrh(xS(ctx->opcode)), 32);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);

    tcg_gen_shri_i64(tmp, cpu_vsrl(xS(ctx->opcode)), 32);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if ((DM(ctx->opcode) & 2) == 0) {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
    }
    if ((DM(ctx->opcode) & 1) == 0) {
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
    } else {
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ul
#define SGN_MASK_SP 0x8000000080000000ul
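/* SGN_MASK_DP selects the sign bit of one double-precision value per
 * doubleword; SGN_MASK_SP selects the sign bit of each of the two
 * single-precision values packed in a doubleword.
 */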
#define VSX_SCALAR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    TCGv_i64 xb, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xb = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode))); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: \
        tcg_gen_andc_i64(xb, xb, sgm); \
        break; \
    case OP_NABS: \
        tcg_gen_or_i64(xb, xb, sgm); \
        break; \
    case OP_NEG: \
        tcg_gen_xor_i64(xb, xb, sgm); \
        break; \
    case OP_CPSGN: { \
        TCGv_i64 xa = tcg_temp_new_i64(); \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_and_i64(xa, xa, sgm); \
        tcg_gen_andc_i64(xb, xb, sgm); \
        tcg_gen_or_i64(xb, xb, xa); \
        tcg_temp_free_i64(xa); \
        break; \
    } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb); \
    tcg_temp_free_i64(xb); \
    tcg_temp_free_i64(sgm); \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
#define VSX_VECTOR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    TCGv_i64 xbh, xbl, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xbh = tcg_temp_new_i64(); \
    xbl = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode))); \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode))); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        break; \
    case OP_NABS: \
        tcg_gen_or_i64(xbh, xbh, sgm); \
        tcg_gen_or_i64(xbl, xbl, sgm); \
        break; \
    case OP_NEG: \
        tcg_gen_xor_i64(xbh, xbh, sgm); \
        tcg_gen_xor_i64(xbl, xbl, sgm); \
        break; \
    case OP_CPSGN: { \
        TCGv_i64 xah = tcg_temp_new_i64(); \
        TCGv_i64 xal = tcg_temp_new_i64(); \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_and_i64(xah, xah, sgm); \
        tcg_gen_and_i64(xal, xal, sgm); \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        tcg_gen_or_i64(xbh, xbh, xah); \
        tcg_gen_or_i64(xbl, xbl, xal); \
        tcg_temp_free_i64(xah); \
        tcg_temp_free_i64(xal); \
        break; \
    } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh); \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl); \
    tcg_temp_free_i64(xbh); \
    tcg_temp_free_i64(xbl); \
    tcg_temp_free_i64(sgm); \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
#define VSX_LOGICAL(name, tcg_op) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
           cpu_vsrh(xB(ctx->opcode))); \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
           cpu_vsrl(xB(ctx->opcode))); \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
#define VSX_XXMRG(name, high) \
static void glue(gen_, name)(DisasContext * ctx) \
{ \
    TCGv_i64 a0, a1, b0, b1; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    a0 = tcg_temp_new_i64(); \
    a1 = tcg_temp_new_i64(); \
    b0 = tcg_temp_new_i64(); \
    b1 = tcg_temp_new_i64(); \
    if (high) { \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
    } else { \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
    } \
    tcg_gen_shri_i64(a0, a0, 32); \
    tcg_gen_shri_i64(b0, b0, 32); \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)), \
                        b0, a0, 32, 32); \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), \
                        b1, a1, 32, 32); \
    tcg_temp_free_i64(a0); \
    tcg_temp_free_i64(a1); \
    tcg_temp_free_i64(b0); \
    tcg_temp_free_i64(b1); \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0:
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        break;
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2:
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        break;
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
/*** SPE extension ***/
/* Register moves */

static inline void gen_evmra(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

#if defined(TARGET_PPC64)
    /* rD := rA */
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);

    /* spe_acc := rA */
    tcg_gen_st_i64(cpu_gpr[rA(ctx->opcode)],
                   cpu_env,
                   offsetof(CPUPPCState, spe_acc));
#else
    TCGv_i64 tmp = tcg_temp_new_i64();

    /* tmp := rA_lo + rA_hi << 32 */
    tcg_gen_concat_i32_i64(tmp, cpu_gpr[rA(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);

    /* spe_acc := tmp */
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
    tcg_temp_free_i64(tmp);

    /* rD := rA */
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
static inline void gen_load_gpr64(TCGv_i64 t, int reg)
{
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(t, cpu_gpr[reg]);
#else
    tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
#endif
}

static inline void gen_store_gpr64(int reg, TCGv_i64 t)
{
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(cpu_gpr[reg], t);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
    tcg_gen_shri_i64(tmp, t, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
    tcg_temp_free_i64(tmp);
#endif
}
#define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
{ \
    if (Rc(ctx->opcode)) \
        gen_##name1(ctx); \
    else \
        gen_##name0(ctx); \
}

/* Handler for undefined SPE opcodes */
static inline void gen_speundef(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
/* SPE logic */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_LOGIC2(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
           cpu_gpr[rB(ctx->opcode)]); \
}
#else
#define GEN_SPEOP_LOGIC2(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
           cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
           cpu_gprh[rB(ctx->opcode)]); \
}
#endif

GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
/* SPE logic immediate */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    TCGv_i32 t0 = tcg_temp_local_new_i32(); \
    TCGv_i32 t1 = tcg_temp_local_new_i32(); \
    TCGv_i64 t2 = tcg_temp_local_new_i64(); \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_opi(t0, t0, rB(ctx->opcode)); \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t1, t2); \
    tcg_temp_free_i64(t2); \
    tcg_opi(t1, t1, rB(ctx->opcode)); \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
}
#else
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
            rB(ctx->opcode)); \
    tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
            rB(ctx->opcode)); \
}
#endif

GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
/* SPE arithmetic */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH1(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    TCGv_i32 t0 = tcg_temp_local_new_i32(); \
    TCGv_i32 t1 = tcg_temp_local_new_i32(); \
    TCGv_i64 t2 = tcg_temp_local_new_i64(); \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_op(t0, t0); \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t1, t2); \
    tcg_temp_free_i64(t2); \
    tcg_op(t1, t1); \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
}
#else
#define GEN_SPEOP_ARITH1(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]); \
}
#endif
static inline void gen_op_evabs(TCGv_i32 ret, TCGv_i32 arg1)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
    tcg_gen_neg_i32(ret, arg1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_i32(ret, arg1);
    gen_set_label(l2);
}
GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
static inline void gen_op_evrndw(TCGv_i32 ret, TCGv_i32 arg1)
{
    tcg_gen_addi_i32(ret, arg1, 0x8000);
    tcg_gen_ext16u_i32(ret, ret);
}
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH2(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    TCGv_i32 t0 = tcg_temp_local_new_i32(); \
    TCGv_i32 t1 = tcg_temp_local_new_i32(); \
    TCGv_i32 t2 = tcg_temp_local_new_i32(); \
    TCGv_i64 t3 = tcg_temp_local_new_i64(); \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(t0, t0, t2); \
    tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t1, t3); \
    tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t2, t3); \
    tcg_temp_free_i64(t3); \
    tcg_op(t1, t1, t2); \
    tcg_temp_free_i32(t2); \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
}
#else
#define GEN_SPEOP_ARITH2(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], \
           cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)], \
           cpu_gprh[rB(ctx->opcode)]); \
}
#endif
static inline void gen_op_evsrwu(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shr_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
static inline void gen_op_evsrws(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_sar_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
static inline void gen_op_evslw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    t0 = tcg_temp_local_new_i32();
    /* No error here: 6 bits are used */
    tcg_gen_andi_i32(t0, arg2, 0x3F);
    tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
    tcg_gen_shl_i32(ret, arg1, t0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i32(ret, 0);
    gen_set_label(l2);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
static inline void gen_op_evrlw(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, arg2, 0x1F);
    tcg_gen_rotl_i32(ret, arg1, t0);
    tcg_temp_free_i32(t0);
}
GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
static inline void gen_evmergehi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
static inline void gen_op_evsubf(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_sub_i32(ret, arg2, arg1);
}
GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
/* SPE arithmetic immediate */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    TCGv_i32 t0 = tcg_temp_local_new_i32(); \
    TCGv_i32 t1 = tcg_temp_local_new_i32(); \
    TCGv_i64 t2 = tcg_temp_local_new_i64(); \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
    tcg_op(t0, t0, rA(ctx->opcode)); \
    tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t1, t2); \
    tcg_temp_free_i64(t2); \
    tcg_op(t1, t1, rA(ctx->opcode)); \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
}
#else
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
           rA(ctx->opcode)); \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)], \
           rA(ctx->opcode)); \
}
#endif

GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
/* SPE comparison */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_COMP(name, tcg_cond) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    int l1 = gen_new_label(); \
    int l2 = gen_new_label(); \
    int l3 = gen_new_label(); \
    int l4 = gen_new_label(); \
    TCGv_i32 t0 = tcg_temp_local_new_i32(); \
    TCGv_i32 t1 = tcg_temp_local_new_i32(); \
    TCGv_i64 t2 = tcg_temp_local_new_i64(); \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
    tcg_gen_brcond_i32(tcg_cond, t0, t1, l1); \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0); \
    tcg_gen_br(l2); \
    gen_set_label(l1); \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
                     CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
    gen_set_label(l2); \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t0, t2); \
    tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32); \
    tcg_gen_trunc_i64_i32(t1, t2); \
    tcg_temp_free_i64(t2); \
    tcg_gen_brcond_i32(tcg_cond, t0, t1, l3); \
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                     ~(CRF_CH | CRF_CH_AND_CL)); \
    tcg_gen_br(l4); \
    gen_set_label(l3); \
    tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                    CRF_CH | CRF_CH_OR_CL); \
    gen_set_label(l4); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
}
#else
#define GEN_SPEOP_COMP(name, tcg_cond) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    int l1 = gen_new_label(); \
    int l2 = gen_new_label(); \
    int l3 = gen_new_label(); \
    int l4 = gen_new_label(); \
 \
    tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)], \
                       cpu_gpr[rB(ctx->opcode)], l1); \
    tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0); \
    tcg_gen_br(l2); \
    gen_set_label(l1); \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], \
                     CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL); \
    gen_set_label(l2); \
    tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)], \
                       cpu_gprh[rB(ctx->opcode)], l3); \
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                     ~(CRF_CH | CRF_CH_AND_CL)); \
    tcg_gen_br(l4); \
    gen_set_label(l3); \
    tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], \
                    CRF_CH | CRF_CH_OR_CL); \
    gen_set_label(l4); \
}
#endif
GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
static inline void gen_brinc(DisasContext *ctx)
{
    /* Note: brinc is usable even if SPE is disabled */
    gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
static inline void gen_evmergelo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
}
static inline void gen_evmergehilo(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
}
static inline void gen_evmergelohi(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
    tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    if (rD(ctx->opcode) == rA(ctx->opcode)) {
        TCGv_i32 tmp = tcg_temp_new_i32();
        tcg_gen_mov_i32(tmp, cpu_gpr[rA(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], tmp);
        tcg_temp_free_i32(tmp);
    } else {
        tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
        tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    }
#endif
}
static inline void gen_evsplati(DisasContext *ctx)
{
    uint64_t imm = ((int32_t)(rA(ctx->opcode) << 27)) >> 27;

#if defined(TARGET_PPC64)
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
#else
    tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
#endif
}

static inline void gen_evsplatfi(DisasContext *ctx)
{
    uint64_t imm = rA(ctx->opcode) << 27;

#if defined(TARGET_PPC64)
    tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], (imm << 32) | imm);
#else
    tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
    tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
#endif
}
static inline void gen_evsel(DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    int l3 = gen_new_label();
    int l4 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();
#if defined(TARGET_PPC64)
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
#endif
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l2);
    gen_set_label(l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
#endif
    gen_set_label(l2);
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t2, cpu_gpr[rA(ctx->opcode)]);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l4);
    gen_set_label(l3);
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t2, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
    gen_set_label(l4);
    tcg_temp_free_i32(t0);
#if defined(TARGET_PPC64)
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
#endif
}

static void gen_evsel0(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel1(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel2(DisasContext *ctx)
{
    gen_evsel(ctx);
}

static void gen_evsel3(DisasContext *ctx)
{
    gen_evsel(ctx);
}
static inline void gen_evmwumi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB */
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_tl(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif

    tcg_gen_mul_i64(t0, t0, t1);  /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
static inline void gen_evmwumia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    gen_evmwumi(ctx);            /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    /* acc := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));
    tcg_temp_free_i64(tmp);
}
static inline void gen_evmwumiaa(DisasContext *ctx)
{
    TCGv_i64 acc;
    TCGv_i64 tmp;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    gen_evmwumi(ctx);           /* rD := rA * rB */

    acc = tcg_temp_new_i64();
    tmp = tcg_temp_new_i64();

    /* tmp := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));

    /* Load acc */
    tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    /* acc := tmp + acc */
    tcg_gen_add_i64(acc, acc, tmp);

    /* Store acc */
    tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    /* rD := acc */
    gen_store_gpr64(rD(ctx->opcode), acc);

    tcg_temp_free_i64(acc);
    tcg_temp_free_i64(tmp);
}
static inline void gen_evmwsmi(DisasContext *ctx)
{
    TCGv_i64 t0, t1;

    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();

    /* t0 := rA; t1 := rB */
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif

    tcg_gen_mul_i64(t0, t0, t1);  /* t0 := rA * rB */

    gen_store_gpr64(rD(ctx->opcode), t0); /* rD := t0 */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
static inline void gen_evmwsmia(DisasContext *ctx)
{
    TCGv_i64 tmp;

    gen_evmwsmi(ctx);            /* rD := rA * rB */

    tmp = tcg_temp_new_i64();

    /* acc := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));
    tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUPPCState, spe_acc));

    tcg_temp_free_i64(tmp);
}
static inline void gen_evmwsmiaa(DisasContext *ctx)
{
    TCGv_i64 acc;
    TCGv_i64 tmp;

    gen_evmwsmi(ctx);           /* rD := rA * rB */

    acc = tcg_temp_new_i64();
    tmp = tcg_temp_new_i64();

    /* tmp := rD */
    gen_load_gpr64(tmp, rD(ctx->opcode));

    /* Load acc */
    tcg_gen_ld_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    /* acc := tmp + acc */
    tcg_gen_add_i64(acc, acc, tmp);

    /* Store acc */
    tcg_gen_st_i64(acc, cpu_env, offsetof(CPUPPCState, spe_acc));

    /* rD := acc */
    gen_store_gpr64(rD(ctx->opcode), acc);

    tcg_temp_free_i64(acc);
    tcg_temp_free_i64(tmp);
}
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE); ////
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE); //
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE); ////
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE); ////
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE); //
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE);
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE); ////
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE); ////
/* SPE load and stores */
static inline void gen_addr_spe_imm_index(DisasContext *ctx, TCGv EA, int sh)
{
    target_ulong uimm = rB(ctx->opcode);

    if (rA(ctx->opcode) == 0) {
        tcg_gen_movi_tl(EA, uimm << sh);
    } else {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], uimm << sh);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    }
}
static inline void gen_op_evldd(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    gen_qemu_ld64(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#else
    TCGv_i64 t0 = tcg_temp_new_i64();
    gen_qemu_ld64(ctx, t0, addr);
    tcg_gen_trunc_i64_i32(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
#endif
}
static inline void gen_op_evldw(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_ld32u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld32u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_ld32u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
static inline void gen_op_evldh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
static inline void gen_op_evlhhesplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
static inline void gen_op_evlhhousplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
static inline void gen_op_evlhhossplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_ext32u_tl(t0, t0);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
static inline void gen_op_evlwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
#endif
    tcg_temp_free(t0);
}
static inline void gen_op_evlwhou(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld16u(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
static inline void gen_op_evlwhos(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld16s(ctx, t0, addr);
    tcg_gen_ext32u_tl(cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16s(ctx, t0, addr);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
#else
    gen_qemu_ld16s(ctx, cpu_gprh[rD(ctx->opcode)], addr);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16s(ctx, cpu_gpr[rD(ctx->opcode)], addr);
#endif
}
static inline void gen_op_evlwwsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
    gen_qemu_ld32u(ctx, t0, addr);
#if defined(TARGET_PPC64)
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], t0);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
static inline void gen_op_evlwhsplat(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 48);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t0);
#else
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gprh[rD(ctx->opcode)], t0, 16);
    tcg_gen_or_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_ld16u(ctx, t0, addr);
    tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], t0, 16);
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free(t0);
}
static inline void gen_op_evstdd(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    gen_qemu_st64(ctx, cpu_gpr[rS(ctx->opcode)], addr);
#else
    TCGv_i64 t0 = tcg_temp_new_i64();
    tcg_gen_concat_i32_i64(t0, cpu_gpr[rS(ctx->opcode)], cpu_gprh[rS(ctx->opcode)]);
    gen_qemu_st64(ctx, t0, addr);
    tcg_temp_free_i64(t0);
#endif
}
static inline void gen_op_evstdw(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 4);
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
static inline void gen_op_evstdh(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(ctx, t0, addr);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
static inline void gen_op_evstwhe(DisasContext *ctx, TCGv addr)
{
    TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 48);
#else
    tcg_gen_shri_tl(t0, cpu_gprh[rS(ctx->opcode)], 16);
#endif
    gen_qemu_st16(ctx, t0, addr);
    gen_addr_add(ctx, addr, addr, 2);
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 16);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
}
static inline void gen_op_evstwho(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st16(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st16(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
    gen_addr_add(ctx, addr, addr, 2);
    gen_qemu_st16(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
static inline void gen_op_evstwwe(DisasContext *ctx, TCGv addr)
{
#if defined(TARGET_PPC64)
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], 32);
    gen_qemu_st32(ctx, t0, addr);
    tcg_temp_free(t0);
#else
    gen_qemu_st32(ctx, cpu_gprh[rS(ctx->opcode)], addr);
#endif
}
static inline void gen_op_evstwwo(DisasContext *ctx, TCGv addr)
{
    gen_qemu_st32(ctx, cpu_gpr[rS(ctx->opcode)], addr);
}
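/* GEN_SPEOP_LDST() wraps one of the helpers above into a full handler: it
 * raises the SPE-unavailable exception when ctx->spe_enabled is clear,
 * computes the effective address (immediate index, scaled via the sh
 * argument, when Rc is set; rA+rB indexed otherwise) and then calls the
 * matching gen_op_<name>() helper. */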
#define GEN_SPEOP_LDST(name, opc2, sh) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv t0; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    t0 = tcg_temp_new(); \
    if (Rc(ctx->opcode)) { \
        gen_addr_spe_imm_index(ctx, t0, sh); \
    } else { \
        gen_addr_reg_index(ctx, t0); \
    } \
    gen_op_##name(ctx, t0); \
    tcg_temp_free(t0); \
}
GEN_SPEOP_LDST(evldd, 0x00, 3);
GEN_SPEOP_LDST(evldw, 0x01, 3);
GEN_SPEOP_LDST(evldh, 0x02, 3);
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1);
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1);
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1);
GEN_SPEOP_LDST(evlwhe, 0x08, 2);
GEN_SPEOP_LDST(evlwhou, 0x0A, 2);
GEN_SPEOP_LDST(evlwhos, 0x0B, 2);
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2);
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2);

GEN_SPEOP_LDST(evstdd, 0x10, 3);
GEN_SPEOP_LDST(evstdw, 0x11, 3);
GEN_SPEOP_LDST(evstdh, 0x12, 3);
GEN_SPEOP_LDST(evstwhe, 0x18, 2);
GEN_SPEOP_LDST(evstwho, 0x1A, 2);
GEN_SPEOP_LDST(evstwwe, 0x1C, 2);
GEN_SPEOP_LDST(evstwwo, 0x1E, 2);
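/* Broadly, each GEN_SPE() line below declares a pair of SPE opcodes decoded
 * from the same opc2/opc3 slot; speundef fills the half of a pair that has
 * no implementation here, and the two 32-bit masks give the invalid-bit
 * patterns for each form. */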
/* Multiply and add - TODO */

GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);//
GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, 0xFFFFFFFF, PPC_SPE);
GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, 0x0000F800, PPC_SPE);
GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, 0x00000000, PPC_SPE);

GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0xFFFFFFFF, 0x00000000, PPC_SPE);

GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0xFFFFFFFF, 0x00000000, PPC_SPE);
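/* The SPE floating-point wrappers that follow come in two flavours: for
 * TARGET_PPC64 a 64-bit SPE value sits in a single GPR, so 32-bit results
 * are masked and OR-ed into the low word, while for 32-bit targets
 * gen_load_gpr64()/gen_store_gpr64() assemble the value from the
 * cpu_gpr/cpu_gprh halves. */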
/*** SPE floating-point extension ***/
#if defined(TARGET_PPC64)
#define GEN_SPEFPUOP_CONV_32_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0; \
    TCGv t1; \
    t0 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
    gen_helper_##name(t0, cpu_env, t0); \
    t1 = tcg_temp_new(); \
    tcg_gen_extu_i32_tl(t1, t0); \
    tcg_temp_free_i32(t0); \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
                    0xFFFFFFFF00000000ULL); \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
    tcg_temp_free(t1); \
}
#define GEN_SPEFPUOP_CONV_32_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0; \
    TCGv t1; \
    t0 = tcg_temp_new_i32(); \
    gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]); \
    t1 = tcg_temp_new(); \
    tcg_gen_extu_i32_tl(t1, t0); \
    tcg_temp_free_i32(t0); \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
                    0xFFFFFFFF00000000ULL); \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t1); \
    tcg_temp_free(t1); \
}
#define GEN_SPEFPUOP_CONV_64_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rB(ctx->opcode)]); \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); \
    tcg_temp_free_i32(t0); \
}
#define GEN_SPEFPUOP_CONV_64_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
                      cpu_gpr[rB(ctx->opcode)]); \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0, t1; \
    TCGv t2; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i32(); \
    t1 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
    gen_helper_##name(t0, cpu_env, t0, t1); \
    tcg_temp_free_i32(t1); \
    t2 = tcg_temp_new(); \
    tcg_gen_extu_i32_tl(t2, t0); \
    tcg_temp_free_i32(t0); \
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], \
                    0xFFFFFFFF00000000ULL); \
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)], t2); \
    tcg_temp_free(t2); \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
}
#define GEN_SPEFPUOP_COMP_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0, t1; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i32(); \
    t1 = tcg_temp_new_i32(); \
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); \
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_i32(t1); \
}
#define GEN_SPEFPUOP_COMP_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
}
#else
#define GEN_SPEFPUOP_CONV_32_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
                      cpu_gpr[rB(ctx->opcode)]); \
}
#define GEN_SPEFPUOP_CONV_32_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0 = tcg_temp_new_i64(); \
    gen_load_gpr64(t0, rB(ctx->opcode)); \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); \
    tcg_temp_free_i64(t0); \
}
#define GEN_SPEFPUOP_CONV_64_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0 = tcg_temp_new_i64(); \
    gen_helper_##name(t0, cpu_env, cpu_gpr[rB(ctx->opcode)]); \
    gen_store_gpr64(rD(ctx->opcode), t0); \
    tcg_temp_free_i64(t0); \
}
#define GEN_SPEFPUOP_CONV_64_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0 = tcg_temp_new_i64(); \
    gen_load_gpr64(t0, rB(ctx->opcode)); \
    gen_helper_##name(t0, cpu_env, t0); \
    gen_store_gpr64(rD(ctx->opcode), t0); \
    tcg_temp_free_i64(t0); \
}
#define GEN_SPEFPUOP_ARITH2_32_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_env, \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
}
#define GEN_SPEFPUOP_ARITH2_64_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0, t1; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_load_gpr64(t0, rA(ctx->opcode)); \
    gen_load_gpr64(t1, rB(ctx->opcode)); \
    gen_helper_##name(t0, cpu_env, t0, t1); \
    gen_store_gpr64(rD(ctx->opcode), t0); \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}
#define GEN_SPEFPUOP_COMP_32(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); \
}
#define GEN_SPEFPUOP_COMP_64(name) \
static inline void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0, t1; \
    if (unlikely(!ctx->spe_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_SPEU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_load_gpr64(t0, rA(ctx->opcode)); \
    gen_load_gpr64(t1, rB(ctx->opcode)); \
    gen_helper_##name(cpu_crf[crfD(ctx->opcode)], cpu_env, t0, t1); \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}
#endif
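/* Naming convention for the wrappers used below: evfs... are vector single
 * precision ops, efs... scalar single precision, efd... scalar double
 * precision.  CONV_x_y converts an y-bit source into an x-bit result,
 * ARITH2 takes two source registers, and COMP writes its result to crfD. */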
/* Single precision floating-point vectors operations */
GEN_SPEFPUOP_ARITH2_64_64(evfsadd);
GEN_SPEFPUOP_ARITH2_64_64(evfssub);
GEN_SPEFPUOP_ARITH2_64_64(evfsmul);
GEN_SPEFPUOP_ARITH2_64_64(evfsdiv);
static inline void gen_evfsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    ~0x8000000080000000LL);
#else
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    ~0x80000000);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    ~0x80000000);
#endif
}
static inline void gen_evfsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   0x8000000080000000LL);
#else
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   0x80000000);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                   0x80000000);
#endif
}
static inline void gen_evfsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    0x8000000080000000LL);
#else
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    0x80000000);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    0x80000000);
#endif
}

GEN_SPEFPUOP_CONV_64_64(evfscfui);
GEN_SPEFPUOP_CONV_64_64(evfscfsi);
GEN_SPEFPUOP_CONV_64_64(evfscfuf);
GEN_SPEFPUOP_CONV_64_64(evfscfsf);
GEN_SPEFPUOP_CONV_64_64(evfsctui);
GEN_SPEFPUOP_CONV_64_64(evfsctsi);
GEN_SPEFPUOP_CONV_64_64(evfsctuf);
GEN_SPEFPUOP_CONV_64_64(evfsctsf);
GEN_SPEFPUOP_CONV_64_64(evfsctuiz);
GEN_SPEFPUOP_CONV_64_64(evfsctsiz);

GEN_SPEFPUOP_COMP_64(evfscmpgt);
GEN_SPEFPUOP_COMP_64(evfscmplt);
GEN_SPEFPUOP_COMP_64(evfscmpeq);
GEN_SPEFPUOP_COMP_64(evfststgt);
GEN_SPEFPUOP_COMP_64(evfststlt);
GEN_SPEFPUOP_COMP_64(evfststeq);
/* Opcodes definitions */
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
/* Single precision floating-point operations */
GEN_SPEFPUOP_ARITH2_32_32(efsadd);
GEN_SPEFPUOP_ARITH2_32_32(efssub);
GEN_SPEFPUOP_ARITH2_32_32(efsmul);
GEN_SPEFPUOP_ARITH2_32_32(efsdiv);
static inline void gen_efsabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    (target_long)~0x80000000LL);
}
static inline void gen_efsnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   0x80000000);
}
static inline void gen_efsneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    0x80000000);
}

GEN_SPEFPUOP_CONV_32_32(efscfui);
GEN_SPEFPUOP_CONV_32_32(efscfsi);
GEN_SPEFPUOP_CONV_32_32(efscfuf);
GEN_SPEFPUOP_CONV_32_32(efscfsf);
GEN_SPEFPUOP_CONV_32_32(efsctui);
GEN_SPEFPUOP_CONV_32_32(efsctsi);
GEN_SPEFPUOP_CONV_32_32(efsctuf);
GEN_SPEFPUOP_CONV_32_32(efsctsf);
GEN_SPEFPUOP_CONV_32_32(efsctuiz);
GEN_SPEFPUOP_CONV_32_32(efsctsiz);
GEN_SPEFPUOP_CONV_32_64(efscfd);

GEN_SPEFPUOP_COMP_32(efscmpgt);
GEN_SPEFPUOP_COMP_32(efscmplt);
GEN_SPEFPUOP_COMP_32(efscmpeq);
GEN_SPEFPUOP_COMP_32(efststgt);
GEN_SPEFPUOP_COMP_32(efststlt);
GEN_SPEFPUOP_COMP_32(efststeq);
/* Opcodes definitions */
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE); //
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE); //
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE); //
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE); //
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE); //
/* Double precision floating-point operations */
GEN_SPEFPUOP_ARITH2_64_64(efdadd);
GEN_SPEFPUOP_ARITH2_64_64(efdsub);
GEN_SPEFPUOP_ARITH2_64_64(efdmul);
GEN_SPEFPUOP_ARITH2_64_64(efddiv);
static inline void gen_efdabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    ~0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_andi_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    ~0x80000000);
#endif
}
static inline void gen_efdnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_ori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                   0x80000000);
#endif
}
static inline void gen_efdneg(DisasContext *ctx)
{
    if (unlikely(!ctx->spe_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_SPEU);
        return;
    }
#if defined(TARGET_PPC64)
    tcg_gen_xori_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    0x8000000000000000LL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_xori_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],
                    0x80000000);
#endif
}

GEN_SPEFPUOP_CONV_64_32(efdcfui);
GEN_SPEFPUOP_CONV_64_32(efdcfsi);
GEN_SPEFPUOP_CONV_64_32(efdcfuf);
GEN_SPEFPUOP_CONV_64_32(efdcfsf);
GEN_SPEFPUOP_CONV_32_64(efdctui);
GEN_SPEFPUOP_CONV_32_64(efdctsi);
GEN_SPEFPUOP_CONV_32_64(efdctuf);
GEN_SPEFPUOP_CONV_32_64(efdctsf);
GEN_SPEFPUOP_CONV_32_64(efdctuiz);
GEN_SPEFPUOP_CONV_32_64(efdctsiz);
GEN_SPEFPUOP_CONV_64_32(efdcfs);
GEN_SPEFPUOP_CONV_64_64(efdcfuid);
GEN_SPEFPUOP_CONV_64_64(efdcfsid);
GEN_SPEFPUOP_CONV_64_64(efdctuidz);
GEN_SPEFPUOP_CONV_64_64(efdctsidz);

GEN_SPEFPUOP_COMP_64(efdcmpgt);
GEN_SPEFPUOP_COMP_64(efdcmplt);
GEN_SPEFPUOP_COMP_64(efdcmpeq);
GEN_SPEFPUOP_COMP_64(efdtstgt);
GEN_SPEFPUOP_COMP_64(efdtstlt);
GEN_SPEFPUOP_COMP_64(efdtsteq);
/* Opcodes definitions */
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE); //
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE); //
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE); //
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE); //
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE); //
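/* Opcode table.  Each GEN_HANDLER entry records a handler generated above
 * together with its opc1/opc2/opc3 decode values, a mask of instruction
 * bits that must be zero, and the feature flag(s) that decide whether a
 * given CPU model registers the instruction. */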
static opcode_t opcodes[] = {
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER),
GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB),
GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
#if defined(TARGET_PPC64)
GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
#endif
GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(frsqrtes, 0x3B, 0x1A, 0xFF, 0x001F07C0, PPC_FLOAT_FRSQRTES),
GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT),
GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT),
GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT),
GEN_HANDLER(fabs, 0x3F, 0x08, 0x08, 0x001F0000, PPC_FLOAT),
GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT),
GEN_HANDLER(fnabs, 0x3F, 0x08, 0x04, 0x001F0000, PPC_FLOAT),
GEN_HANDLER(fneg, 0x3F, 0x08, 0x01, 0x001F0000, PPC_FLOAT),
GEN_HANDLER_E(fcpsgn, 0x3F, 0x08, 0x00, 0x00000000, PPC_NONE, PPC2_ISA205),
GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT),
GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT),
GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT),
GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT),
GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x00000000, PPC_FLOAT),
GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006e0800, PPC_FLOAT),
#if defined(TARGET_PPC64)
GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO),
GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
#if defined(TARGET_PPC64)
GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
#if defined(TARGET_PPC64)
GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
#endif
GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW),
GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
#if defined(TARGET_PPC64)
GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
#if defined(TARGET_PPC64)
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
#endif
GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC),
GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC),
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE),
GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE),
GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x02000001, PPC_ALTIVEC),
GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
#if defined(TARGET_PPC64)
GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
#endif
GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x03FF0001, PPC_MEM_TLBIE),
GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE),
GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
#if defined(TARGET_PPC64)
GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI),
GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
#endif
GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
              PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE),
GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
               PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
GEN_HANDLER(vsldoi, 0x04, 0x16, 0xFF, 0x00000400, PPC_ALTIVEC),
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE),
GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE),
GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE),
GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE),
#undef GEN_INT_ARITH_ADD
#undef GEN_INT_ARITH_ADD_CONST
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
                                add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)

#undef GEN_INT_ARITH_DIVW
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),

#if defined(TARGET_PPC64)
#undef GEN_INT_ARITH_DIVD
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),

#undef GEN_INT_ARITH_MUL_HELPER
#define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
#endif

#undef GEN_INT_ARITH_SUBF
#undef GEN_INT_ARITH_SUBF_CONST
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
                                 add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
#define GEN_LOGICAL1(name, tcg_op, opc, type) \
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
#endif

#if defined(TARGET_PPC64)
#define GEN_PPC64_R2(name, opc1, opc2) \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
             PPC_64B)
#define GEN_PPC64_R4(name, opc1, opc2) \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
             PPC_64B), \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
             PPC_64B), \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
             PPC_64B)
GEN_PPC64_R4(rldicl, 0x1E, 0x00),
GEN_PPC64_R4(rldicr, 0x1E, 0x02),
GEN_PPC64_R4(rldic, 0x1E, 0x04),
GEN_PPC64_R2(rldcl, 0x1E, 0x08),
GEN_PPC64_R2(rldcr, 0x1E, 0x09),
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
#endif
#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)
#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type), \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type)
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type), \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type)
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)

GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT),
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT),
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT),
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT),
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES),
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE),
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL),
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT),
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT),
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT),
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT),
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT),
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT),
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT),
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT),
#if defined(TARGET_PPC64)
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B),
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B),
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B),
#endif
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT),
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT),
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT),
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT),
#define GEN_LD(name, ldop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDU(name, ldop, opc, type) \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUX(name, ldop, opc2, opc3, type) \
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2) \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
#define GEN_LDS(name, ldop, op, type) \
GEN_LD(name, ldop, op | 0x20, type) \
GEN_LDU(name, ldop, op | 0x21, type) \
GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
GEN_LDX(name, ldop, 0x17, op | 0x00, type)

GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
#if defined(TARGET_PPC64)
GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
GEN_LDUX(ld, ld64, 0x15, 0x01, PPC_64B)
GEN_LDX(ld, ld64, 0x15, 0x00, PPC_64B)
GEN_LDX_E(ldbr, ld64ur, 0x14, 0x10, PPC_NONE, PPC2_DBRX)
#endif
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
#define GEN_ST(name, stop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STU(name, stop, opc, type) \
GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUX(name, stop, opc2, opc3, type) \
GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_STX_E(name, stop, opc2, opc3, type, type2) \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
#define GEN_STS(name, stop, op, type) \
GEN_ST(name, stop, op | 0x20, type) \
GEN_STU(name, stop, op | 0x21, type) \
GEN_STUX(name, stop, 0x17, op | 0x01, type) \
GEN_STX(name, stop, 0x17, op | 0x00, type)

GEN_STS(stb, st8, 0x06, PPC_INTEGER)
GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
GEN_STS(stw, st32, 0x04, PPC_INTEGER)
#if defined(TARGET_PPC64)
GEN_STUX(std, st64, 0x15, 0x05, PPC_64B)
GEN_STX(std, st64, 0x15, 0x04, PPC_64B)
GEN_STX_E(stdbr, st64r, 0x14, 0x14, PPC_NONE, PPC2_DBRX)
#endif
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
#define GEN_LDF(name, ldop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUF(name, ldop, opc, type) \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_LDUXF(name, ldop, opc, type) \
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
#define GEN_LDXF(name, ldop, opc2, opc3, type) \
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_LDFS(name, ldop, op, type) \
GEN_LDF(name, ldop, op | 0x20, type) \
GEN_LDUF(name, ldop, op | 0x21, type) \
GEN_LDUXF(name, ldop, op | 0x01, type) \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)

GEN_LDFS(lfd, ld64, 0x12, PPC_FLOAT)
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT)
GEN_HANDLER_E(lfiwax, 0x1f, 0x17, 0x1a, 0x00000001, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(lfdp, 0x39, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(lfdpx, 0x1F, 0x17, 0x18, 0x00200001, PPC_NONE, PPC2_ISA205),
#define GEN_STF(name, stop, opc, type) \
GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUF(name, stop, opc, type) \
GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
#define GEN_STUXF(name, stop, opc, type) \
GEN_HANDLER(name##ux, 0x1F, 0x17, opc, 0x00000001, type),
#define GEN_STXF(name, stop, opc2, opc3, type) \
GEN_HANDLER(name##x, 0x1F, opc2, opc3, 0x00000001, type),
#define GEN_STFS(name, stop, op, type) \
GEN_STF(name, stop, op | 0x20, type) \
GEN_STUF(name, stop, op | 0x21, type) \
GEN_STUXF(name, stop, op | 0x01, type) \
GEN_STXF(name, stop, 0x17, op | 0x00, type)

GEN_STFS(stfd, st64, 0x16, PPC_FLOAT)
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT)
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX)
GEN_HANDLER_E(stfdp, 0x3D, 0xFF, 0xFF, 0x00200003, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(stfdpx, 0x1F, 0x17, 0x1C, 0x00200001, PPC_NONE, PPC2_ISA205),
#define GEN_CRLOGIC(name, tcg_op, opc) \
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
#undef GEN_MAC_HANDLER
#define GEN_MAC_HANDLER(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
#define GEN_VR_LDX(name, opc2, opc3) \
GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
#define GEN_VR_STX(name, opc2, opc3) \
GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
#define GEN_VR_LVE(name, opc2, opc3) \
GEN_HANDLER(lve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
#define GEN_VR_STVE(name, opc2, opc3) \
GEN_HANDLER(stve##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)
GEN_VR_LDX(lvx, 0x07, 0x03),
GEN_VR_LDX(lvxl, 0x07, 0x0B),
GEN_VR_LVE(bx, 0x07, 0x00),
GEN_VR_LVE(hx, 0x07, 0x01),
GEN_VR_LVE(wx, 0x07, 0x02),
GEN_VR_STX(svx, 0x07, 0x07),
GEN_VR_STX(svxl, 0x07, 0x0F),
GEN_VR_STVE(bx, 0x07, 0x04),
GEN_VR_STVE(hx, 0x07, 0x05),
GEN_VR_STVE(wx, 0x07, 0x06),
#undef GEN_VX_LOGICAL
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3)                        \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16),
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17),
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18),
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19),
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20),
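
/* AltiVec VX-form arithmetic.  Everything below lives under primary opcode
 * 0x04; each GEN_VXFORM invocation contributes exactly one opc_handler_t
 * entry keyed on the decoded (opc2, opc3) pair, with an all-zero
 * invalid-bits mask.
 */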
#define GEN_VXFORM(name, opc2, opc3)                                    \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
GEN_VXFORM(vaddubm, 0, 0),
GEN_VXFORM(vadduhm, 0, 1),
GEN_VXFORM(vadduwm, 0, 2),
GEN_VXFORM(vsububm, 0, 16),
GEN_VXFORM(vsubuhm, 0, 17),
GEN_VXFORM(vsubuwm, 0, 18),
GEN_VXFORM(vmaxub, 1, 0),
GEN_VXFORM(vmaxuh, 1, 1),
GEN_VXFORM(vmaxuw, 1, 2),
GEN_VXFORM(vmaxsb, 1, 4),
GEN_VXFORM(vmaxsh, 1, 5),
GEN_VXFORM(vmaxsw, 1, 6),
GEN_VXFORM(vminub, 1, 8),
GEN_VXFORM(vminuh, 1, 9),
GEN_VXFORM(vminuw, 1, 10),
GEN_VXFORM(vminsb, 1, 12),
GEN_VXFORM(vminsh, 1, 13),
GEN_VXFORM(vminsw, 1, 14),
GEN_VXFORM(vavgub, 1, 16),
GEN_VXFORM(vavguh, 1, 17),
GEN_VXFORM(vavguw, 1, 18),
GEN_VXFORM(vavgsb, 1, 20),
GEN_VXFORM(vavgsh, 1, 21),
GEN_VXFORM(vavgsw, 1, 22),
GEN_VXFORM(vmrghb, 6, 0),
GEN_VXFORM(vmrghh, 6, 1),
GEN_VXFORM(vmrghw, 6, 2),
GEN_VXFORM(vmrglb, 6, 4),
GEN_VXFORM(vmrglh, 6, 5),
GEN_VXFORM(vmrglw, 6, 6),
GEN_VXFORM(vmuloub, 4, 0),
GEN_VXFORM(vmulouh, 4, 1),
GEN_VXFORM(vmulosb, 4, 4),
GEN_VXFORM(vmulosh, 4, 5),
GEN_VXFORM(vmuleub, 4, 8),
GEN_VXFORM(vmuleuh, 4, 9),
GEN_VXFORM(vmulesb, 4, 12),
GEN_VXFORM(vmulesh, 4, 13),
GEN_VXFORM(vslb, 2, 4),
GEN_VXFORM(vslh, 2, 5),
GEN_VXFORM(vslw, 2, 6),
GEN_VXFORM(vsrb, 2, 8),
GEN_VXFORM(vsrh, 2, 9),
GEN_VXFORM(vsrw, 2, 10),
GEN_VXFORM(vsrab, 2, 12),
GEN_VXFORM(vsrah, 2, 13),
GEN_VXFORM(vsraw, 2, 14),
GEN_VXFORM(vslo, 6, 16),
GEN_VXFORM(vsro, 6, 17),
GEN_VXFORM(vaddcuw, 0, 6),
GEN_VXFORM(vsubcuw, 0, 22),
GEN_VXFORM(vaddubs, 0, 8),
GEN_VXFORM(vadduhs, 0, 9),
GEN_VXFORM(vadduws, 0, 10),
GEN_VXFORM(vaddsbs, 0, 12),
GEN_VXFORM(vaddshs, 0, 13),
GEN_VXFORM(vaddsws, 0, 14),
GEN_VXFORM(vsububs, 0, 24),
GEN_VXFORM(vsubuhs, 0, 25),
GEN_VXFORM(vsubuws, 0, 26),
GEN_VXFORM(vsubsbs, 0, 28),
GEN_VXFORM(vsubshs, 0, 29),
GEN_VXFORM(vsubsws, 0, 30),
GEN_VXFORM(vrlb, 2, 0),
GEN_VXFORM(vrlh, 2, 1),
GEN_VXFORM(vrlw, 2, 2),
GEN_VXFORM(vsl, 2, 7),
GEN_VXFORM(vsr, 2, 11),
GEN_VXFORM(vpkuhum, 7, 0),
GEN_VXFORM(vpkuwum, 7, 1),
GEN_VXFORM(vpkuhus, 7, 2),
GEN_VXFORM(vpkuwus, 7, 3),
GEN_VXFORM(vpkshus, 7, 4),
GEN_VXFORM(vpkswus, 7, 5),
GEN_VXFORM(vpkshss, 7, 6),
GEN_VXFORM(vpkswss, 7, 7),
GEN_VXFORM(vpkpx, 7, 12),
GEN_VXFORM(vsum4ubs, 4, 24),
GEN_VXFORM(vsum4sbs, 4, 28),
GEN_VXFORM(vsum4shs, 4, 25),
GEN_VXFORM(vsum2sws, 4, 26),
GEN_VXFORM(vsumsws, 4, 30),
GEN_VXFORM(vaddfp, 5, 0),
GEN_VXFORM(vsubfp, 5, 1),
GEN_VXFORM(vmaxfp, 5, 16),
GEN_VXFORM(vminfp, 5, 17),
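
/* AltiVec vector compares.  GEN_VXRFORM registers each compare twice: once
 * under its base opc3 for the plain form, and once with bit 4 of opc3 set
 * for the record ('.') form, whose handler name gets a trailing '_' and
 * whose disassembly string gets a trailing '.'.
 */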
#define GEN_VXRFORM1(opname, name, str, opc2, opc3)                     \
GEN_HANDLER2(name, str, 0x4, opc2, opc3, 0x00000000, PPC_ALTIVEC),
#define GEN_VXRFORM(name, opc2, opc3)                                   \
GEN_VXRFORM1(name, name, #name, opc2, opc3)                             \
GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
GEN_VXRFORM(vcmpequb, 3, 0)
GEN_VXRFORM(vcmpequh, 3, 1)
GEN_VXRFORM(vcmpequw, 3, 2)
GEN_VXRFORM(vcmpgtsb, 3, 12)
GEN_VXRFORM(vcmpgtsh, 3, 13)
GEN_VXRFORM(vcmpgtsw, 3, 14)
GEN_VXRFORM(vcmpgtub, 3, 8)
GEN_VXRFORM(vcmpgtuh, 3, 9)
GEN_VXRFORM(vcmpgtuw, 3, 10)
GEN_VXRFORM(vcmpeqfp, 3, 3)
GEN_VXRFORM(vcmpgefp, 3, 7)
GEN_VXRFORM(vcmpgtfp, 3, 11)
GEN_VXRFORM(vcmpbfp, 3, 15)
#undef GEN_VXFORM_SIMM
#define GEN_VXFORM_SIMM(name, opc2, opc3)                               \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
GEN_VXFORM_SIMM(vspltisb, 6, 12),
GEN_VXFORM_SIMM(vspltish, 6, 13),
GEN_VXFORM_SIMM(vspltisw, 6, 14),
#undef GEN_VXFORM_NOA
#define GEN_VXFORM_NOA(name, opc2, opc3)                                \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x001f0000, PPC_ALTIVEC)
GEN_VXFORM_NOA(vupkhsb, 7, 8),
GEN_VXFORM_NOA(vupkhsh, 7, 9),
GEN_VXFORM_NOA(vupklsb, 7, 10),
GEN_VXFORM_NOA(vupklsh, 7, 11),
GEN_VXFORM_NOA(vupkhpx, 7, 13),
GEN_VXFORM_NOA(vupklpx, 7, 15),
GEN_VXFORM_NOA(vrefp, 5, 4),
GEN_VXFORM_NOA(vrsqrtefp, 5, 5),
GEN_VXFORM_NOA(vexptefp, 5, 6),
GEN_VXFORM_NOA(vlogefp, 5, 7),
GEN_VXFORM_NOA(vrfim, 5, 8),
GEN_VXFORM_NOA(vrfin, 5, 9),
GEN_VXFORM_NOA(vrfip, 5, 10),
GEN_VXFORM_NOA(vrfiz, 5, 11),
#undef GEN_VXFORM_UIMM
#define GEN_VXFORM_UIMM(name, opc2, opc3)                               \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_ALTIVEC)
GEN_VXFORM_UIMM(vspltb, 6, 8),
GEN_VXFORM_UIMM(vsplth, 6, 9),
GEN_VXFORM_UIMM(vspltw, 6, 10),
GEN_VXFORM_UIMM(vcfux, 5, 12),
GEN_VXFORM_UIMM(vcfsx, 5, 13),
GEN_VXFORM_UIMM(vctuxs, 5, 14),
GEN_VXFORM_UIMM(vctsxs, 5, 15),
#undef GEN_VAFORM_PAIRED
#define GEN_VAFORM_PAIRED(name0, name1, opc2)                           \
GEN_HANDLER(name0##_##name1, 0x04, opc2, 0xFF, 0x00000000, PPC_ALTIVEC)
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16),
GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18),
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19),
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20),
GEN_VAFORM_PAIRED(vsel, vperm, 21),
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23),
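
/* The VA-form instructions above come in pairs that differ only in the
 * lowest bit of their extended opcode, so each GEN_VAFORM_PAIRED line
 * registers a single combined handler and the generated
 * gen_<name0>_<name1> routine looks at that bit (the Rc position) itself to
 * pick the instruction.
 */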
GEN_HANDLER_E(lxsdx, 0x1F, 0x0C, 0x12, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(lxvd2x, 0x1F, 0x0C, 0x1A, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(lxvdsx, 0x1F, 0x0C, 0x0A, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(lxvw4x, 0x1F, 0x0C, 0x18, 0, PPC_NONE, PPC2_VSX),

GEN_HANDLER_E(stxsdx, 0x1F, 0xC, 0x16, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(stxvd2x, 0x1F, 0xC, 0x1E, 0, PPC_NONE, PPC2_VSX),
GEN_HANDLER_E(stxvw4x, 0x1F, 0xC, 0x1C, 0, PPC_NONE, PPC2_VSX),
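
/* VSX XX2/XX3-form helpers.  In effect, the bits that extend the VSX
 * register numbers overlap the fields the decoder uses as table indexes, so
 * each macro registers the identical handler for every value of those bits:
 * two entries for XX2 forms, four for XX3 forms.  GEN_XX3FORM_DM
 * additionally enumerates the 2-bit DM field carried inside opc3, which is
 * why it expands to sixteen entries.
 */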
#define GEN_XX2FORM(name, opc2, opc3, fl2)                              \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0, opc3, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 1, opc3, 0, PPC_NONE, fl2)

#define GEN_XX3FORM(name, opc2, opc3, fl2)                              \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 0, opc3, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 1, opc3, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 2, opc3, 0, PPC_NONE, fl2), \
GEN_HANDLER2_E(name, #name, 0x3C, opc2 | 3, opc3, 0, PPC_NONE, fl2)

#undef GEN_XX3FORM_DM
#define GEN_XX3FORM_DM(name, opc2, opc3) \
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x00, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x04, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x08, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x00, opc3|0x0C, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x01, opc3|0x0C, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x02, opc3|0x0C, 0, PPC_NONE, PPC2_VSX),\
GEN_HANDLER2_E(name, #name, 0x3C, opc2|0x03, opc3|0x0C, 0, PPC_NONE, PPC2_VSX)
GEN_XX2FORM(xsabsdp, 0x12, 0x15, PPC2_VSX),
GEN_XX2FORM(xsnabsdp, 0x12, 0x16, PPC2_VSX),
GEN_XX2FORM(xsnegdp, 0x12, 0x17, PPC2_VSX),
GEN_XX3FORM(xscpsgndp, 0x00, 0x16, PPC2_VSX),

GEN_XX2FORM(xvabsdp, 0x12, 0x1D, PPC2_VSX),
GEN_XX2FORM(xvnabsdp, 0x12, 0x1E, PPC2_VSX),
GEN_XX2FORM(xvnegdp, 0x12, 0x1F, PPC2_VSX),
GEN_XX3FORM(xvcpsgndp, 0x00, 0x1E, PPC2_VSX),
GEN_XX2FORM(xvabssp, 0x12, 0x19, PPC2_VSX),
GEN_XX2FORM(xvnabssp, 0x12, 0x1A, PPC2_VSX),
GEN_XX2FORM(xvnegsp, 0x12, 0x1B, PPC2_VSX),
GEN_XX3FORM(xvcpsgnsp, 0x00, 0x1A, PPC2_VSX),
#define VSX_LOGICAL(name, opc2, opc3, fl2) \
GEN_XX3FORM(name, opc2, opc3, fl2)

VSX_LOGICAL(xxland, 0x8, 0x10, PPC2_VSX),
VSX_LOGICAL(xxlandc, 0x8, 0x11, PPC2_VSX),
VSX_LOGICAL(xxlor, 0x8, 0x12, PPC2_VSX),
VSX_LOGICAL(xxlxor, 0x8, 0x13, PPC2_VSX),
VSX_LOGICAL(xxlnor, 0x8, 0x14, PPC2_VSX),
GEN_XX3FORM(xxmrghw, 0x08, 0x02, PPC2_VSX),
GEN_XX3FORM(xxmrglw, 0x08, 0x06, PPC2_VSX),
GEN_XX2FORM(xxspltw, 0x08, 0x0A, PPC2_VSX),
GEN_XX3FORM_DM(xxsldwi, 0x08, 0x00),
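
/* xxsel has only two fixed bits in its extended opcode; the remaining bits
 * are operand fields, so the same handler has to be registered for all
 * eight opc2 slots of a row, and the row macro is then instantiated for
 * every possible opc3 value.
 */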
#define GEN_XXSEL_ROW(opc3) \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x18, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x19, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1A, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1B, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1C, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1D, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1E, opc3, 0, PPC_NONE, PPC2_VSX), \
GEN_HANDLER2_E(xxsel, "xxsel", 0x3C, 0x1F, opc3, 0, PPC_NONE, PPC2_VSX), \

GEN_XXSEL_ROW(0x00)
GEN_XXSEL_ROW(0x01)
GEN_XXSEL_ROW(0x02)
GEN_XXSEL_ROW(0x03)
GEN_XXSEL_ROW(0x04)
GEN_XXSEL_ROW(0x05)
GEN_XXSEL_ROW(0x06)
GEN_XXSEL_ROW(0x07)
GEN_XXSEL_ROW(0x08)
GEN_XXSEL_ROW(0x09)
GEN_XXSEL_ROW(0x0A)
GEN_XXSEL_ROW(0x0B)
GEN_XXSEL_ROW(0x0C)
GEN_XXSEL_ROW(0x0D)
GEN_XXSEL_ROW(0x0E)
GEN_XXSEL_ROW(0x0F)
GEN_XXSEL_ROW(0x10)
GEN_XXSEL_ROW(0x11)
GEN_XXSEL_ROW(0x12)
GEN_XXSEL_ROW(0x13)
GEN_XXSEL_ROW(0x14)
GEN_XXSEL_ROW(0x15)
GEN_XXSEL_ROW(0x16)
GEN_XXSEL_ROW(0x17)
GEN_XXSEL_ROW(0x18)
GEN_XXSEL_ROW(0x19)
GEN_XXSEL_ROW(0x1A)
GEN_XXSEL_ROW(0x1B)
GEN_XXSEL_ROW(0x1C)
GEN_XXSEL_ROW(0x1D)
GEN_XXSEL_ROW(0x1E)
GEN_XXSEL_ROW(0x1F)

GEN_XX3FORM_DM(xxpermdi, 0x08, 0x01),
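
/* SPE instructions.  Each GEN_SPE line registers a single combined handler
 * (name0_name1) under primary opcode 0x04; at decode time only the Rc bit of
 * the opcode is used to choose which of the two invalid-bits masks applies,
 * and the generated handler separates the even/odd pair itself.  A speundef
 * half with an all-ones mask effectively rejects encodings that have no
 * architected counterpart.
 */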
#define GEN_SPE(name0, name1, opc2, opc3, inval0, inval1, type) \
GEN_OPCODE_DUAL(name0##_##name1, 0x04, opc2, opc3, inval0, inval1, type, PPC_NONE)
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, 0x0000F800, PPC_SPE),
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x0000F800, 0x00000000, PPC_SPE),
GEN_SPE(evmra, speundef, 0x02, 0x13, 0x0000F800, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(speundef, evand, 0x08, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0xFFFFFFFF, 0x00000000, PPC_SPE),
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, 0xFFFFFFFF, PPC_SPE),
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, 0x0000F800, PPC_SPE),
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, 0x00000000, PPC_SPE),
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, 0x00600000, PPC_SPE),
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, 0x00600000, PPC_SPE),
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, 0xFFFFFFFF, PPC_SPE),

GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),

GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_SINGLE),
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, 0x00000000, PPC_SPE_SINGLE),
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_SINGLE),
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_SINGLE),
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_SINGLE),
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_SINGLE),

GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, 0x0000F800, PPC_SPE_DOUBLE),
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, 0xFFFFFFFF, PPC_SPE_DOUBLE),
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, 0x00000000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, 0x00180000, PPC_SPE_DOUBLE),
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, 0x00600000, PPC_SPE_DOUBLE),
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, 0xFFFFFFFF, PPC_SPE_DOUBLE),
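
/* SPE vector loads and stores.  All of them decode with opc3 == 0x0C under
 * primary opcode 0x04; the sh argument is not used by the table entry
 * itself, it mirrors the code-generation form of the macro, where it scales
 * the immediate offset by the access size (3 for 8-byte, 2 for 4-byte,
 * 1 for 2-byte forms).
 */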
#undef GEN_SPEOP_LDST
#define GEN_SPEOP_LDST(name, opc2, sh)                                  \
GEN_HANDLER(name, 0x04, opc2, 0x0C, 0x00000000, PPC_SPE)
GEN_SPEOP_LDST(evldd, 0x00, 3),
GEN_SPEOP_LDST(evldw, 0x01, 3),
GEN_SPEOP_LDST(evldh, 0x02, 3),
GEN_SPEOP_LDST(evlhhesplat, 0x04, 1),
GEN_SPEOP_LDST(evlhhousplat, 0x06, 1),
GEN_SPEOP_LDST(evlhhossplat, 0x07, 1),
GEN_SPEOP_LDST(evlwhe, 0x08, 2),
GEN_SPEOP_LDST(evlwhou, 0x0A, 2),
GEN_SPEOP_LDST(evlwhos, 0x0B, 2),
GEN_SPEOP_LDST(evlwwsplat, 0x0C, 2),
GEN_SPEOP_LDST(evlwhsplat, 0x0E, 2),

GEN_SPEOP_LDST(evstdd, 0x10, 3),
GEN_SPEOP_LDST(evstdw, 0x11, 3),
GEN_SPEOP_LDST(evstdh, 0x12, 3),
GEN_SPEOP_LDST(evstwhe, 0x18, 2),
GEN_SPEOP_LDST(evstwho, 0x1A, 2),
GEN_SPEOP_LDST(evstwwe, 0x1C, 2),
GEN_SPEOP_LDST(evstwwo, 0x1E, 2),
};
10137 #include "helper_regs.h"
10138 #include "translate_init.c"
10140 /*****************************************************************************/
10141 /* Misc PowerPC helpers */
void ppc_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                        int flags)
{
#define RGPL  4
#define RFPL  4

    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = &cpu->env;
    int i;

    cpu_fprintf(f, "NIP " TARGET_FMT_lx " LR " TARGET_FMT_lx " CTR "
                TARGET_FMT_lx " XER " TARGET_FMT_lx "\n",
                env->nip, env->lr, env->ctr, cpu_read_xer(env));
    cpu_fprintf(f, "MSR " TARGET_FMT_lx " HID0 " TARGET_FMT_lx " HF "
                TARGET_FMT_lx " idx %d\n", env->msr, env->spr[SPR_HID0],
                env->hflags, env->mmu_idx);
#if !defined(NO_TIMER_DUMP)
    cpu_fprintf(f, "TB %08" PRIu32 " %08" PRIu64
#if !defined(CONFIG_USER_ONLY)
                " DECR %08" PRIu32
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
    for (i = 0; i < 32; i++) {
        if ((i & (RGPL - 1)) == 0)
            cpu_fprintf(f, "GPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, ppc_dump_gpr(env, i));
        if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, " [");
    for (i = 0; i < 8; i++) {
        char a = '-';

        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ] RES " TARGET_FMT_lx "\n",
                env->reserve_addr);
    for (i = 0; i < 32; i++) {
        if ((i & (RFPL - 1)) == 0)
            cpu_fprintf(f, "FPR%02d", i);
        cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
        if ((i & (RFPL - 1)) == (RFPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "FPSCR " TARGET_FMT_lx "\n", env->fpscr);
#if !defined(CONFIG_USER_ONLY)
    cpu_fprintf(f, " SRR0 " TARGET_FMT_lx " SRR1 " TARGET_FMT_lx
                   " PVR " TARGET_FMT_lx " VRSAVE " TARGET_FMT_lx "\n",
                env->spr[SPR_SRR0], env->spr[SPR_SRR1],
                env->spr[SPR_PVR], env->spr[SPR_VRSAVE]);

    cpu_fprintf(f, "SPRG0 " TARGET_FMT_lx " SPRG1 " TARGET_FMT_lx
                   " SPRG2 " TARGET_FMT_lx " SPRG3 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG0], env->spr[SPR_SPRG1],
                env->spr[SPR_SPRG2], env->spr[SPR_SPRG3]);

    cpu_fprintf(f, "SPRG4 " TARGET_FMT_lx " SPRG5 " TARGET_FMT_lx
                   " SPRG6 " TARGET_FMT_lx " SPRG7 " TARGET_FMT_lx "\n",
                env->spr[SPR_SPRG4], env->spr[SPR_SPRG5],
                env->spr[SPR_SPRG6], env->spr[SPR_SPRG7]);

    if (env->excp_model == POWERPC_EXCP_BOOKE) {
        cpu_fprintf(f, "CSRR0 " TARGET_FMT_lx " CSRR1 " TARGET_FMT_lx
                       " MCSRR0 " TARGET_FMT_lx " MCSRR1 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_CSRR0], env->spr[SPR_BOOKE_CSRR1],
                    env->spr[SPR_BOOKE_MCSRR0], env->spr[SPR_BOOKE_MCSRR1]);

        cpu_fprintf(f, " TCR " TARGET_FMT_lx " TSR " TARGET_FMT_lx
                       " ESR " TARGET_FMT_lx " DEAR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_TCR], env->spr[SPR_BOOKE_TSR],
                    env->spr[SPR_BOOKE_ESR], env->spr[SPR_BOOKE_DEAR]);

        cpu_fprintf(f, " PIR " TARGET_FMT_lx " DECAR " TARGET_FMT_lx
                       " IVPR " TARGET_FMT_lx " EPCR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_PIR], env->spr[SPR_BOOKE_DECAR],
                    env->spr[SPR_BOOKE_IVPR], env->spr[SPR_BOOKE_EPCR]);

        cpu_fprintf(f, " MCSR " TARGET_FMT_lx " SPRG8 " TARGET_FMT_lx
                       " EPR " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MCSR], env->spr[SPR_BOOKE_SPRG8],
                    env->spr[SPR_BOOKE_EPR]);

        cpu_fprintf(f, " MCAR " TARGET_FMT_lx " PID1 " TARGET_FMT_lx
                       " PID2 " TARGET_FMT_lx " SVR " TARGET_FMT_lx "\n",
                    env->spr[SPR_Exxx_MCAR], env->spr[SPR_BOOKE_PID1],
                    env->spr[SPR_BOOKE_PID2], env->spr[SPR_E500_SVR]);

        /*
         * IVORs are left out as they are large and do not change often --
         * they can be read with "p $ivor0", "p $ivor1", etc.
         */
    }

#if defined(TARGET_PPC64)
    if (env->flags & POWERPC_FLAG_CFAR) {
        cpu_fprintf(f, " CFAR " TARGET_FMT_lx "\n", env->cfar);
    }
#endif

    switch (env->mmu_model) {
    case POWERPC_MMU_32B:
    case POWERPC_MMU_601:
    case POWERPC_MMU_SOFT_6xx:
    case POWERPC_MMU_SOFT_74xx:
#if defined(TARGET_PPC64)
    case POWERPC_MMU_64B:
#endif
        cpu_fprintf(f, " SDR1 " TARGET_FMT_lx "\n", env->spr[SPR_SDR1]);
        break;
    case POWERPC_MMU_BOOKE206:
        cpu_fprintf(f, " MAS0 " TARGET_FMT_lx " MAS1 " TARGET_FMT_lx
                       " MAS2 " TARGET_FMT_lx " MAS3 " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS0], env->spr[SPR_BOOKE_MAS1],
                    env->spr[SPR_BOOKE_MAS2], env->spr[SPR_BOOKE_MAS3]);

        cpu_fprintf(f, " MAS4 " TARGET_FMT_lx " MAS6 " TARGET_FMT_lx
                       " MAS7 " TARGET_FMT_lx " PID " TARGET_FMT_lx "\n",
                    env->spr[SPR_BOOKE_MAS4], env->spr[SPR_BOOKE_MAS6],
                    env->spr[SPR_BOOKE_MAS7], env->spr[SPR_BOOKE_PID]);

        cpu_fprintf(f, "MMUCFG " TARGET_FMT_lx " TLB0CFG " TARGET_FMT_lx
                       " TLB1CFG " TARGET_FMT_lx "\n",
                    env->spr[SPR_MMUCFG], env->spr[SPR_BOOKE_TLB0CFG],
                    env->spr[SPR_BOOKE_TLB1CFG]);
        break;
    default:
        break;
    }
#endif

#undef RGPL
#undef RFPL
}
void ppc_cpu_dump_statistics(CPUState *cs, FILE*f,
                             fprintf_function cpu_fprintf, int flags)
{
#if defined(DO_PPC_STATISTICS)
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    opc_handler_t **t1, **t2, **t3, *handler;
    int op1, op2, op3;

    t1 = cpu->env.opcodes;
    for (op1 = 0; op1 < 64; op1++) {
        handler = t1[op1];
        if (is_indirect_opcode(handler)) {
            t2 = ind_table(handler);
            for (op2 = 0; op2 < 32; op2++) {
                handler = t2[op2];
                if (is_indirect_opcode(handler)) {
                    t3 = ind_table(handler);
                    for (op3 = 0; op3 < 32; op3++) {
                        handler = t3[op3];
                        if (handler->count == 0)
                            continue;
                        cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
                                    "%016" PRIx64 " %" PRId64 "\n",
                                    op1, op2, op3, op1, (op3 << 5) | op2,
                                    handler->oname,
                                    handler->count, handler->count);
                    }
                } else {
                    if (handler->count == 0)
                        continue;
                    cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
                                "%016" PRIx64 " %" PRId64 "\n",
                                op1, op2, op1, op2, handler->oname,
                                handler->count, handler->count);
                }
            }
        } else {
            if (handler->count == 0)
                continue;
            cpu_fprintf(f, "%02x (%02x ) %16s: %016" PRIx64
                        " %" PRId64 "\n",
                        op1, op1, handler->oname,
                        handler->count, handler->count);
        }
    }
#endif
}
/*****************************************************************************/
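/* The translation loop below turns one guest basic block into TCG ops: it
 * fetches 32-bit opcodes (byte-swapping them in little-endian mode),
 * resolves the handler through up to three levels of opcode tables (opc1,
 * then opc2, then opc3 for indirect entries), checks the handler's
 * invalid-bits mask, calls the generator, and stops at an exception, a page
 * boundary, single-stepping or the TB instruction limit.
 */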
static inline void gen_intermediate_code_internal(PowerPCCPU *cpu,
                                                  TranslationBlock *tb,
                                                  bool search_pc)
{
    CPUState *cs = CPU(cpu);
    CPUPPCState *env = &cpu->env;
    DisasContext ctx, *ctxp = &ctx;
    opc_handler_t **table, *handler;
    target_ulong pc_start;
    uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;
    ctx.nip = pc_start;
    ctx.tb = tb;
    ctx.exception = POWERPC_EXCP_NONE;
    ctx.spr_cb = env->spr_cb;
    ctx.mem_idx = env->mmu_idx;
    ctx.insns_flags = env->insns_flags;
    ctx.insns_flags2 = env->insns_flags2;
    ctx.access_type = -1;
    ctx.le_mode = env->hflags & (1 << MSR_LE) ? 1 : 0;
#if defined(TARGET_PPC64)
    ctx.sf_mode = msr_is_64bit(env, env->msr);
    ctx.has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx.fpu_enabled = msr_fp;
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx.spe_enabled = msr_spe;
    else
        ctx.spe_enabled = 0;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx.altivec_enabled = msr_vr;
    else
        ctx.altivec_enabled = 0;
    if ((env->flags & POWERPC_FLAG_VSX) && msr_vsx) {
        ctx.vsx_enabled = msr_vsx;
    } else {
        ctx.vsx_enabled = 0;
    }
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx.singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx.singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx.singlestep_enabled |= CPU_BRANCH_STEP;
    if (unlikely(cs->singlestep_enabled)) {
        ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
    }
#if defined (DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_tb_start();
    /* Set env in case of segfault during code fetch */
    while (ctx.exception == POWERPC_EXCP_NONE
            && tcg_ctx.gen_opc_ptr < gen_opc_end) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.nip) {
                    gen_debug_exception(ctxp);
                    break;
                }
            }
        }
        if (unlikely(search_pc)) {
            j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    tcg_ctx.gen_opc_instr_start[lj++] = 0;
            }
            tcg_ctx.gen_opc_pc[lj] = ctx.nip;
            tcg_ctx.gen_opc_instr_start[lj] = 1;
            tcg_ctx.gen_opc_icount[lj] = num_insns;
        }
        LOG_DISAS("----------------\n");
        LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
                  ctx.nip, ctx.mem_idx, (int)msr_ir);
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        if (unlikely(ctx.le_mode)) {
            ctx.opcode = bswap32(cpu_ldl_code(env, ctx.nip));
        } else {
            ctx.opcode = cpu_ldl_code(env, ctx.nip);
        }
        LOG_DISAS("translate opcode %08x (%02x %02x %02x) (%s)\n",
                  ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
                  opc3(ctx.opcode), ctx.le_mode ? "little" : "big");
        if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
            tcg_gen_debug_insn_start(ctx.nip);
        }
        ctx.nip += 4;
        table = env->opcodes;
        num_insns++;
        handler = table[opc1(ctx.opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc2(ctx.opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc3(ctx.opcode)];
            }
        }
        /* Is opcode *REALLY* valid ? */
        if (unlikely(handler->handler == &gen_invalid)) {
            if (qemu_log_enabled()) {
                qemu_log("invalid/unsupported opcode: "
                         "%02x - %02x - %02x (%08x) " TARGET_FMT_lx " %d\n",
                         opc1(ctx.opcode), opc2(ctx.opcode),
                         opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
            }
        } else {
            uint32_t inval;

            if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                         && Rc(ctx.opcode))) {
                inval = handler->inval2;
            } else {
                inval = handler->inval1;
            }

            if (unlikely((ctx.opcode & inval) != 0)) {
                if (qemu_log_enabled()) {
                    qemu_log("invalid bits: %08x for opcode: "
                             "%02x - %02x - %02x (%08x) " TARGET_FMT_lx "\n",
                             ctx.opcode & inval, opc1(ctx.opcode),
                             opc2(ctx.opcode), opc3(ctx.opcode),
                             ctx.opcode, ctx.nip - 4);
                }
                gen_inval_exception(ctxp, POWERPC_EXCP_INVAL_INVAL);
                break;
            }
        }
        (*(handler->handler))(&ctx);
#if defined(DO_PPC_STATISTICS)
        handler->count++;
#endif
        /* Check trace mode exceptions */
        if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
                     (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
                     ctx.exception != POWERPC_SYSCALL &&
                     ctx.exception != POWERPC_EXCP_TRAP &&
                     ctx.exception != POWERPC_EXCP_BRANCH)) {
            gen_exception(ctxp, POWERPC_EXCP_TRACE);
        } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
                            (cs->singlestep_enabled) ||
                            singlestep ||
                            num_insns >= max_insns)) {
            /* if we reach a page boundary or are single stepping, stop
             * generation
             */
            break;
        }
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (ctx.exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(&ctx, 0, ctx.nip);
    } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(cs->singlestep_enabled)) {
            gen_debug_exception(ctxp);
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(0);
    }
    gen_tb_end(tb, num_insns);
    *tcg_ctx.gen_opc_ptr = INDEX_op_end;
    if (unlikely(search_pc)) {
        j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
        lj++;
        while (lj <= j)
            tcg_ctx.gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.nip - pc_start;
        tb->icount = num_insns;
    }
#if defined(DEBUG_DISAS)
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        int flags;
        flags = env->bfd_mach;
        flags |= ctx.le_mode << 16;
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(env, pc_start, ctx.nip - pc_start, flags);
        qemu_log("\n");
    }
#endif
}

void gen_intermediate_code (CPUPPCState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(ppc_env_get_cpu(env), tb, false);
}

void gen_intermediate_code_pc (CPUPPCState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(ppc_env_get_cpu(env), tb, true);
}

void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb, int pc_pos)
{
    env->nip = tcg_ctx.gen_opc_pc[pc_pos];