4 * Standard FPU translation
/* Reset the accumulated softfloat exception status in env before a new
 * FP computation, via the reset_fpstatus helper. */
7 static inline void gen_reset_fpstatus(void)
9 gen_helper_reset_fpstatus(tcg_env);
/* Update FPSCR.FPRF from the float64 result in @arg, then call the
 * float_check_status helper so any newly raised FP exceptions are
 * delivered. */
12 static inline void gen_compute_fprf_float64(TCGv_i64 arg)
14 gen_helper_compute_fprf_float64(tcg_env, arg);
15 gen_helper_float_check_status(tcg_env);
18 #if defined(TARGET_PPC64)
/* Copy the high nibble of FPSCR (bits 31..28) into CR field 1, as used
 * by Rc=1 FP instructions.  64-bit target: truncate FPSCR to 32 bits
 * first, then shift right by 28. */
19 static void gen_set_cr1_from_fpscr(DisasContext *ctx)
21 TCGv_i32 tmp = tcg_temp_new_i32();
22 tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
23 tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
/* 32-bit target: the target-long FPSCR can be shifted directly. */
26 static void gen_set_cr1_from_fpscr(DisasContext *ctx)
28 tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
32 /*** Floating-Point arithmetic ***/
/* Common translator for A-form FP ops with three source registers
 * (the fused multiply-add family): calls @helper with three inputs and
 * one i64 result.  Requires the FLOAT insns flag; updates FPRF from
 * the result and sets CR1 when Rc is set. */
33 static bool do_helper_acb(DisasContext *ctx, arg_A *a,
34 void (*helper)(TCGv_i64, TCGv_ptr, TCGv_i64,
37 TCGv_i64 t0, t1, t2, t3;
38 REQUIRE_INSNS_FLAGS(ctx, FLOAT);
40 t0 = tcg_temp_new_i64();
41 t1 = tcg_temp_new_i64();
42 t2 = tcg_temp_new_i64();
43 t3 = tcg_temp_new_i64();
/* t3 receives the result; t0..t2 hold the sources (loads not visible
 * in this fragment). */
48 helper(t3, tcg_env, t0, t1, t2);
50 gen_compute_fprf_float64(t3);
51 if (unlikely(a->rc)) {
52 gen_set_cr1_from_fpscr(ctx);
/* Common translator for two-source A-form FP ops (frA, frB -> frT),
 * e.g. fadd/fsub/fdiv.  Updates FPRF; sets CR1 when Rc. */
57 static bool do_helper_ab(DisasContext *ctx, arg_A_tab *a,
58 void (*helper)(TCGv_i64, TCGv_ptr, TCGv_i64,
62 REQUIRE_INSNS_FLAGS(ctx, FLOAT);
64 t0 = tcg_temp_new_i64();
65 t1 = tcg_temp_new_i64();
66 t2 = tcg_temp_new_i64();
/* t2 = helper(t0, t1); source loads not visible in this fragment. */
70 helper(t2, tcg_env, t0, t1);
72 gen_compute_fprf_float64(t2);
73 if (unlikely(a->rc)) {
74 gen_set_cr1_from_fpscr(ctx);
/* Common translator for A-form FP ops taking frA and frC (fmul family,
 * which encodes its second source in the C field).  Updates FPRF; sets
 * CR1 when Rc. */
79 static bool do_helper_ac(DisasContext *ctx, arg_A_tac *a,
80 void (*helper)(TCGv_i64, TCGv_ptr, TCGv_i64,
84 REQUIRE_INSNS_FLAGS(ctx, FLOAT);
86 t0 = tcg_temp_new_i64();
87 t1 = tcg_temp_new_i64();
88 t2 = tcg_temp_new_i64();
92 helper(t2, tcg_env, t0, t1);
94 gen_compute_fprf_float64(t2);
95 if (unlikely(a->rc)) {
96 gen_set_cr1_from_fpscr(ctx);
/* Legacy (pre-decodetree) generator macro for one-source FP ops:
 * frD = f<name>(frB).  Raises the FPU-unavailable exception when the
 * FPU is disabled; when set_fprf is true the generated code updates
 * FPSCR.FPRF; Rc=1 copies the FPSCR high nibble into CR1.
 * (Commentary intentionally kept outside the macro body so the
 * backslash line continuations stay intact.) */
101 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
102 static void gen_f##name(DisasContext *ctx) \
106 if (unlikely(!ctx->fpu_enabled)) { \
107 gen_exception(ctx, POWERPC_EXCP_FPU); \
110 t0 = tcg_temp_new_i64(); \
111 t1 = tcg_temp_new_i64(); \
112 gen_reset_fpstatus(); \
113 get_fpr(t0, rB(ctx->opcode)); \
114 gen_helper_f##name(t1, tcg_env, t0); \
115 set_fpr(rD(ctx->opcode), t1); \
117 gen_helper_compute_fprf_float64(tcg_env, t1); \
119 gen_helper_float_check_status(tcg_env); \
120 if (unlikely(Rc(ctx->opcode) != 0)) { \
121 gen_set_cr1_from_fpscr(ctx); \
/* Common translator for one-source FP estimate ops (frB -> frT), used
 * by fre/fres/frsqrte/frsqrtes below.  Resets FP status first, updates
 * FPRF, and sets CR1 when Rc. */
125 static bool do_helper_bs(DisasContext *ctx, arg_A_tb *a,
126 void (*helper)(TCGv_i64, TCGv_ptr, TCGv_i64))
130 t0 = tcg_temp_new_i64();
131 t1 = tcg_temp_new_i64();
132 gen_reset_fpstatus();
134 helper(t1, tcg_env, t0);
136 gen_compute_fprf_float64(t1);
137 if (unlikely(a->rc)) {
138 gen_set_cr1_from_fpscr(ctx);
/* fsel: select frC or frB based on the sign/zero test of frA.  Note
 * the helper is called without tcg_env, so no FPSCR status is updated
 * by the operation itself; only CR1 is set when Rc. */
143 static bool trans_FSEL(DisasContext *ctx, arg_A *a)
147 REQUIRE_INSNS_FLAGS(ctx, FLOAT_FSEL);
150 t0 = tcg_temp_new_i64();
151 t1 = tcg_temp_new_i64();
152 t2 = tcg_temp_new_i64();
158 gen_helper_FSEL(t0, t0, t1, t2);
161 gen_set_cr1_from_fpscr(ctx);
/* Common translator for fsqrt/fsqrts: one-source helper call gated on
 * the FLOAT_FSQRT insns flag.  Updates FPRF; sets CR1 when Rc. */
166 static bool do_helper_fsqrt(DisasContext *ctx, arg_A_tb *a,
167 void (*helper)(TCGv_i64, TCGv_ptr, TCGv_i64))
171 REQUIRE_INSNS_FLAGS(ctx, FLOAT_FSQRT);
174 t0 = tcg_temp_new_i64();
175 t1 = tcg_temp_new_i64();
177 gen_reset_fpstatus();
179 helper(t1, tcg_env, t0);
181 gen_compute_fprf_float64(t1);
182 if (unlikely(a->rc != 0)) {
183 gen_set_cr1_from_fpscr(ctx);
/* Decodetree bindings: route each FP arithmetic instruction to the
 * matching do_helper_* translator and gen_helper_* implementation. */
188 TRANS(FADD, do_helper_ab, gen_helper_FADD);
189 TRANS(FADDS, do_helper_ab, gen_helper_FADDS);
190 TRANS(FSUB, do_helper_ab, gen_helper_FSUB);
191 TRANS(FSUBS, do_helper_ab, gen_helper_FSUBS);
192 TRANS(FDIV, do_helper_ab, gen_helper_FDIV);
193 TRANS(FDIVS, do_helper_ab, gen_helper_FDIVS);
194 TRANS(FMUL, do_helper_ac, gen_helper_FMUL);
195 TRANS(FMULS, do_helper_ac, gen_helper_FMULS);
/* Fused multiply-add family: three sources via do_helper_acb. */
197 TRANS(FMADD, do_helper_acb, gen_helper_FMADD);
198 TRANS(FMADDS, do_helper_acb, gen_helper_FMADDS);
199 TRANS(FMSUB, do_helper_acb, gen_helper_FMSUB);
200 TRANS(FMSUBS, do_helper_acb, gen_helper_FMSUBS);
202 TRANS(FNMADD, do_helper_acb, gen_helper_FNMADD);
203 TRANS(FNMADDS, do_helper_acb, gen_helper_FNMADDS);
204 TRANS(FNMSUB, do_helper_acb, gen_helper_FNMSUB);
205 TRANS(FNMSUBS, do_helper_acb, gen_helper_FNMSUBS);
/* Estimate instructions, each gated on its own insns flag. */
207 TRANS_FLAGS(FLOAT_EXT, FRE, do_helper_bs, gen_helper_FRE);
208 TRANS_FLAGS(FLOAT_FRES, FRES, do_helper_bs, gen_helper_FRES);
209 TRANS_FLAGS(FLOAT_FRSQRTE, FRSQRTE, do_helper_bs, gen_helper_FRSQRTE);
210 TRANS_FLAGS(FLOAT_FRSQRTES, FRSQRTES, do_helper_bs, gen_helper_FRSQRTES);
212 TRANS(FSQRT, do_helper_fsqrt, gen_helper_FSQRT);
213 TRANS(FSQRTS, do_helper_fsqrt, gen_helper_FSQRTS);
215 /*** Floating-Point round & convert ***/
/* Round & convert instructions generated via the legacy GEN_FLOAT_B
 * macro: fcti* (float->int conversions, 'z' variants truncate), frsp
 * (round to single), fcfid* (int->float), and fri* (round to integer
 * with the rounding mode baked into the opcode). */
217 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
219 GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
221 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
223 GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
225 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
227 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
229 GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
231 GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
233 GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
235 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
237 GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
239 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
241 GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);
244 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
246 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
248 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
250 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
/* ftdiv: software-divide test; the helper writes the result flags
 * directly into CR field a->bf.  Gated on FP_TST_ISA206. */
252 static bool trans_FTDIV(DisasContext *ctx, arg_X_bf *a)
255 REQUIRE_INSNS_FLAGS2(ctx, FP_TST_ISA206);
257 t0 = tcg_temp_new_i64();
258 t1 = tcg_temp_new_i64();
261 gen_helper_FTDIV(cpu_crf[a->bf], t0, t1);
/* ftsqrt: software square-root test; result flags go straight into CR
 * field a->bf.  Gated on FP_TST_ISA206. */
265 static bool trans_FTSQRT(DisasContext *ctx, arg_X_bf_b *a)
268 REQUIRE_INSNS_FLAGS2(ctx, FP_TST_ISA206);
270 t0 = tcg_temp_new_i64();
272 gen_helper_FTSQRT(cpu_crf[a->bf], t0);
276 /*** Floating-Point compare ***/
/* fcmpo: ordered compare of frA and frB; the helper writes CR field
 * crfD and may raise VXSNAN/VXVC, hence the status check afterwards. */
279 static void gen_fcmpo(DisasContext *ctx)
284 if (unlikely(!ctx->fpu_enabled)) {
285 gen_exception(ctx, POWERPC_EXCP_FPU);
288 t0 = tcg_temp_new_i64();
289 t1 = tcg_temp_new_i64();
290 gen_reset_fpstatus();
291 crf = tcg_constant_i32(crfD(ctx->opcode));
292 get_fpr(t0, rA(ctx->opcode));
293 get_fpr(t1, rB(ctx->opcode));
294 gen_helper_fcmpo(tcg_env, t0, t1, crf);
295 gen_helper_float_check_status(tcg_env);
/* fcmpu: unordered compare of frA and frB into CR field crfD; same
 * shape as gen_fcmpo but using the non-signalling helper. */
299 static void gen_fcmpu(DisasContext *ctx)
304 if (unlikely(!ctx->fpu_enabled)) {
305 gen_exception(ctx, POWERPC_EXCP_FPU);
308 t0 = tcg_temp_new_i64();
309 t1 = tcg_temp_new_i64();
310 gen_reset_fpstatus();
311 crf = tcg_constant_i32(crfD(ctx->opcode));
312 get_fpr(t0, rA(ctx->opcode));
313 get_fpr(t1, rB(ctx->opcode));
314 gen_helper_fcmpu(tcg_env, t0, t1, crf);
315 gen_helper_float_check_status(tcg_env);
318 /*** Floating-point move ***/
320 /* XXX: beware that fabs never checks for NaNs nor update FPSCR */
/* fabs: pure bit operation — clear the sign bit (bit 63) of frB and
 * store into frD; FPSCR/NaNs deliberately untouched (see XXX above). */
321 static void gen_fabs(DisasContext *ctx)
325 if (unlikely(!ctx->fpu_enabled)) {
326 gen_exception(ctx, POWERPC_EXCP_FPU);
329 t0 = tcg_temp_new_i64();
330 t1 = tcg_temp_new_i64();
331 get_fpr(t0, rB(ctx->opcode));
332 tcg_gen_andi_i64(t1, t0, ~(1ULL << 63));
333 set_fpr(rD(ctx->opcode), t1);
334 if (unlikely(Rc(ctx->opcode))) {
335 gen_set_cr1_from_fpscr(ctx);
340 /* XXX: beware that fmr never checks for NaNs nor update FPSCR */
/* fmr: plain register-to-register copy frD = frB; no FPSCR update. */
341 static void gen_fmr(DisasContext *ctx)
344 if (unlikely(!ctx->fpu_enabled)) {
345 gen_exception(ctx, POWERPC_EXCP_FPU);
348 t0 = tcg_temp_new_i64();
349 get_fpr(t0, rB(ctx->opcode));
350 set_fpr(rD(ctx->opcode), t0);
351 if (unlikely(Rc(ctx->opcode))) {
352 gen_set_cr1_from_fpscr(ctx);
357 /* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
/* fnabs: force the sign bit (bit 63) on — negative absolute value;
 * pure bit op, no FPSCR update. */
358 static void gen_fnabs(DisasContext *ctx)
362 if (unlikely(!ctx->fpu_enabled)) {
363 gen_exception(ctx, POWERPC_EXCP_FPU);
366 t0 = tcg_temp_new_i64();
367 t1 = tcg_temp_new_i64();
368 get_fpr(t0, rB(ctx->opcode));
369 tcg_gen_ori_i64(t1, t0, 1ULL << 63);
370 set_fpr(rD(ctx->opcode), t1);
371 if (unlikely(Rc(ctx->opcode))) {
372 gen_set_cr1_from_fpscr(ctx);
377 /* XXX: beware that fneg never checks for NaNs nor update FPSCR */
/* fneg: flip the sign bit (bit 63) of frB; pure bit op, no FPSCR
 * update. */
378 static void gen_fneg(DisasContext *ctx)
382 if (unlikely(!ctx->fpu_enabled)) {
383 gen_exception(ctx, POWERPC_EXCP_FPU);
386 t0 = tcg_temp_new_i64();
387 t1 = tcg_temp_new_i64();
388 get_fpr(t0, rB(ctx->opcode));
389 tcg_gen_xori_i64(t1, t0, 1ULL << 63);
390 set_fpr(rD(ctx->opcode), t1);
391 if (unlikely(Rc(ctx->opcode))) {
392 gen_set_cr1_from_fpscr(ctx);
396 /* fcpsgn: PowerPC 2.05 specification */
397 /* XXX: beware that fcpsgn never checks for NaNs nor update FPSCR */
/* fcpsgn: copy-sign.  deposit(t2, t0, t1, 0, 63) takes bits 62..0
 * (exponent+mantissa) from frB (t1) and keeps bit 63 (the sign) from
 * frA (t0): frD = sign(frA) | magnitude(frB). */
398 static void gen_fcpsgn(DisasContext *ctx)
403 if (unlikely(!ctx->fpu_enabled)) {
404 gen_exception(ctx, POWERPC_EXCP_FPU);
407 t0 = tcg_temp_new_i64();
408 t1 = tcg_temp_new_i64();
409 t2 = tcg_temp_new_i64();
410 get_fpr(t0, rA(ctx->opcode));
411 get_fpr(t1, rB(ctx->opcode));
412 tcg_gen_deposit_i64(t2, t0, t1, 0, 63);
413 set_fpr(rD(ctx->opcode), t2);
414 if (unlikely(Rc(ctx->opcode))) {
415 gen_set_cr1_from_fpscr(ctx);
/* fmrgew: merge even words.  b0 = high word of frB (shifted down);
 * result keeps frA's high word and deposits b0 into bits 31..0, i.e.
 * frD = { frA[hi32], frB[hi32] }. */
419 static void gen_fmrgew(DisasContext *ctx)
424 if (unlikely(!ctx->fpu_enabled)) {
425 gen_exception(ctx, POWERPC_EXCP_FPU);
428 b0 = tcg_temp_new_i64();
429 t0 = tcg_temp_new_i64();
430 t1 = tcg_temp_new_i64();
431 get_fpr(t0, rB(ctx->opcode));
432 tcg_gen_shri_i64(b0, t0, 32);
433 get_fpr(t0, rA(ctx->opcode));
434 tcg_gen_deposit_i64(t1, t0, b0, 0, 32);
435 set_fpr(rD(ctx->opcode), t1);
/* fmrgow: merge odd words.  Deposit frA's low word into bits 63..32 of
 * frB, i.e. frD = { frA[lo32], frB[lo32] }. */
438 static void gen_fmrgow(DisasContext *ctx)
443 if (unlikely(!ctx->fpu_enabled)) {
444 gen_exception(ctx, POWERPC_EXCP_FPU);
447 t0 = tcg_temp_new_i64();
448 t1 = tcg_temp_new_i64();
449 t2 = tcg_temp_new_i64();
450 get_fpr(t0, rB(ctx->opcode));
451 get_fpr(t1, rA(ctx->opcode));
452 tcg_gen_deposit_i64(t2, t0, t1, 32, 32);
453 set_fpr(rD(ctx->opcode), t2);
456 /*** Floating-Point status & ctrl register ***/
/* mcrfs: copy FPSCR field crfS into CR field crfD, then clear the
 * exception bits (including FX) of that FPSCR field.  'shift' and
 * 'nibble' are derived from bfa — their declarations are not visible
 * in this fragment.  The store goes through the store_fpscr helper so
 * the derived FEX/VX bits are recomputed rather than written raw. */
459 static void gen_mcrfs(DisasContext *ctx)
461 TCGv tmp = tcg_temp_new();
463 TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
468 if (unlikely(!ctx->fpu_enabled)) {
469 gen_exception(ctx, POWERPC_EXCP_FPU);
472 bfa = crfS(ctx->opcode);
475 tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
476 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
477 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],
479 tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
480 /* Only the exception bits (including FX) should be cleared if read */
481 tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
482 ~((0xF << shift) & FP_EX_CLEAR_BITS));
483 /* FEX and VX need to be updated, so don't set fpscr directly */
484 tmask = tcg_constant_i32(1 << nibble);
485 gen_helper_store_fpscr(tcg_env, tnew_fpscr, tmask);
/* Copy FPSCR (masked with @mask) into FPR @rt, and hand back an i64
 * holding the full unmasked FPSCR value for further processing by the
 * MFFS* translators below.  (The return statement itself lies outside
 * this visible fragment — presumably 'return fpscr;'.) */
488 static TCGv_i64 place_from_fpscr(int rt, uint64_t mask)
490 TCGv_i64 fpscr = tcg_temp_new_i64();
491 TCGv_i64 fpscr_masked = tcg_temp_new_i64();
493 tcg_gen_extu_tl_i64(fpscr, cpu_fpscr);
494 tcg_gen_andi_i64(fpscr_masked, fpscr, mask);
495 set_fpr(rt, fpscr_masked);
/* Write (fpscr & ~clear_mask) | set_mask back to FPSCR, restricted to
 * the nibbles selected by @store_mask (one bit per FPSCR nibble, as
 * consumed by the store_fpscr helper). */
500 static void store_fpscr_masked(TCGv_i64 fpscr, uint64_t clear_mask,
501 TCGv_i64 set_mask, uint32_t store_mask)
503 TCGv_i64 fpscr_masked = tcg_temp_new_i64();
504 TCGv_i32 st_mask = tcg_constant_i32(store_mask);
506 tcg_gen_andi_i64(fpscr_masked, fpscr, ~clear_mask);
507 tcg_gen_or_i64(fpscr_masked, fpscr_masked, set_mask);
508 gen_helper_store_fpscr(tcg_env, fpscr_masked, st_mask);
/* Pre-ISA-3.0 decode shim: OPCD=63/XO=583 with nonzero bits 11-15 is
 * still plain mffs on older CPUs; on ISA v3.0+ return false so the
 * decodetree pattern group picks the specific MFFS* variant. */
511 static bool trans_MFFS_ISA207(DisasContext *ctx, arg_X_t_rc *a)
513 if (!(ctx->insns_flags2 & PPC2_ISA300)) {
515 * Before Power ISA v3.0, MFFS bits 11~15 were reserved, any instruction
516 * with OPCD=63 and XO=583 should be decoded as MFFS.
518 return trans_MFFS(ctx, a);
521 * For Power ISA v3.0+, return false and let the pattern group
522 * select the correct instruction.
/* mffs: move the full FPSCR into FPR rt; CR1 update when Rc. */
527 static bool trans_MFFS(DisasContext *ctx, arg_X_t_rc *a)
531 gen_reset_fpstatus();
532 place_from_fpscr(a->rt, UINT64_MAX);
534 gen_set_cr1_from_fpscr(ctx);
/* mffsce: read the full FPSCR into rt, then clear the exception-enable
 * bits (FP_ENABLES) in FPSCR, touching only the low two nibbles
 * (store mask 0x0003). */
539 static bool trans_MFFSCE(DisasContext *ctx, arg_X_t *a)
545 gen_reset_fpstatus();
546 fpscr = place_from_fpscr(a->rt, UINT64_MAX);
547 store_fpscr_masked(fpscr, FP_ENABLES, tcg_constant_i64(0), 0x0003);
/* mffscrn: read the control bits of FPSCR into rt, then replace the
 * rounding-mode field (FP_RN) with the RN bits taken from FPR rb
 * (loaded into t1 — the get_fpr call is outside this fragment). */
551 static bool trans_MFFSCRN(DisasContext *ctx, arg_X_tb *a)
557 t1 = tcg_temp_new_i64();
559 tcg_gen_andi_i64(t1, t1, FP_RN);
561 gen_reset_fpstatus();
562 fpscr = place_from_fpscr(a->rt, FP_DRN | FP_ENABLES | FP_NI | FP_RN);
563 store_fpscr_masked(fpscr, FP_RN, t1, 0x0001);
/* mffscdrn: like mffscrn but replaces the decimal rounding-mode field
 * (FP_DRN, store-mask nibble 0x0100) from FPR rb. */
567 static bool trans_MFFSCDRN(DisasContext *ctx, arg_X_tb *a)
573 t1 = tcg_temp_new_i64();
575 tcg_gen_andi_i64(t1, t1, FP_DRN)
577 gen_reset_fpstatus();
578 fpscr = place_from_fpscr(a->rt, FP_DRN | FP_ENABLES | FP_NI | FP_RN);
579 store_fpscr_masked(fpscr, FP_DRN, t1, 0x0100);
/* mffscrni: immediate form of mffscrn — the new RN value comes from
 * the 2-bit instruction immediate. */
583 static bool trans_MFFSCRNI(DisasContext *ctx, arg_X_imm2 *a)
589 t1 = tcg_temp_new_i64();
590 tcg_gen_movi_i64(t1, a->imm);
592 gen_reset_fpstatus();
593 fpscr = place_from_fpscr(a->rt, FP_DRN | FP_ENABLES | FP_NI | FP_RN);
594 store_fpscr_masked(fpscr, FP_RN, t1, 0x0001);
/* mffscdrni: immediate form of mffscdrn — the 3-bit immediate is
 * shifted into position at FPSCR_DRN0 before being stored. */
598 static bool trans_MFFSCDRNI(DisasContext *ctx, arg_X_imm3 *a)
604 t1 = tcg_temp_new_i64();
605 tcg_gen_movi_i64(t1, (uint64_t)a->imm << FPSCR_DRN0);
607 gen_reset_fpstatus();
608 fpscr = place_from_fpscr(a->rt, FP_DRN | FP_ENABLES | FP_NI | FP_RN);
609 store_fpscr_masked(fpscr, FP_DRN, t1, 0x0100);
/* mffsl: lightweight mffs — read only the status, enable and
 * rounding-control fields of FPSCR into rt; FPSCR not modified. */
613 static bool trans_MFFSL(DisasContext *ctx, arg_X_t *a)
617 gen_reset_fpstatus();
618 place_from_fpscr(a->rt, FP_DRN | FP_STATUS | FP_ENABLES | FP_NI | FP_RN);
/* mtfsb0: clear FPSCR bit crbD (bit numbering reversed: 31 - crbD).
 * FEX and VX are derived bits and cannot be cleared directly.  Rc=1
 * copies the FPSCR high nibble (shifted down by FPSCR_OX) into CR1. */
623 static void gen_mtfsb0(DisasContext *ctx)
627 if (unlikely(!ctx->fpu_enabled)) {
628 gen_exception(ctx, POWERPC_EXCP_FPU);
631 crb = 31 - crbD(ctx->opcode);
632 gen_reset_fpstatus();
633 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
634 gen_helper_fpscr_clrbit(tcg_env, tcg_constant_i32(crb));
636 if (unlikely(Rc(ctx->opcode) != 0)) {
637 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
638 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
/* mtfsb1: set FPSCR bit crbD (reversed numbering).  FEX/VX are derived
 * and NI is refused (IEEE-only emulation, per the XXX comment).
 * Setting a bit may enable a pending exception, so the deferred-status
 * check runs at the end. */
643 static void gen_mtfsb1(DisasContext *ctx)
647 if (unlikely(!ctx->fpu_enabled)) {
648 gen_exception(ctx, POWERPC_EXCP_FPU);
651 crb = 31 - crbD(ctx->opcode);
652 /* XXX: we pretend we can only do IEEE floating-point computations */
653 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
654 gen_helper_fpscr_setbit(tcg_env, tcg_constant_i32(crb));
656 if (unlikely(Rc(ctx->opcode) != 0)) {
657 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
658 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
660 /* We can raise a deferred exception */
661 gen_helper_fpscr_check_status(tcg_env);
/* mtfsf: move FPR rB into the FPSCR fields selected by the FLM mask.
 * l=1 writes all fields (width depends on ISA 2.05); w selects the
 * upper word of the field mask on ISA 2.05+. */
665 static void gen_mtfsf(DisasContext *ctx)
671 if (unlikely(!ctx->fpu_enabled)) {
672 gen_exception(ctx, POWERPC_EXCP_FPU);
675 flm = FPFLM(ctx->opcode);
676 l = FPL(ctx->opcode);
677 w = FPW(ctx->opcode);
/* NOTE(review): 'w & !(...)' is bitwise-AND of a logical NOT.  It
 * works because both operands are 0/1 here, but '&&' reads clearer —
 * confirm intent before changing. */
678 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
679 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
683 t0 = tcg_constant_i32(flm << (w * 8));
684 } else if (ctx->insns_flags2 & PPC2_ISA205) {
685 t0 = tcg_constant_i32(0xffff);
687 t0 = tcg_constant_i32(0xff);
689 t1 = tcg_temp_new_i64();
690 get_fpr(t1, rB(ctx->opcode));
691 gen_helper_store_fpscr(tcg_env, t1, t0);
692 if (unlikely(Rc(ctx->opcode) != 0)) {
693 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
694 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
696 /* We can raise a deferred exception */
697 gen_helper_fpscr_check_status(tcg_env);
/* mtfsfi: write a 4-bit immediate into one FPSCR field.  The shift
 * amount sh positions the immediate nibble; (8*w)+7-bf selects the
 * field from either word depending on w. */
701 static void gen_mtfsfi(DisasContext *ctx)
707 if (unlikely(!ctx->fpu_enabled)) {
708 gen_exception(ctx, POWERPC_EXCP_FPU);
711 w = FPW(ctx->opcode);
712 bf = FPBF(ctx->opcode);
/* NOTE(review): same 'w & !(...)' construct as in gen_mtfsf — correct
 * for 0/1 operands but '&&' would be clearer. */
713 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
714 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
717 sh = (8 * w) + 7 - bf;
718 t0 = tcg_constant_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
719 t1 = tcg_constant_i32(1 << sh);
720 gen_helper_store_fpscr(tcg_env, t0, t1);
721 if (unlikely(Rc(ctx->opcode) != 0)) {
722 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
723 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
725 /* We can raise a deferred exception */
726 gen_helper_fpscr_check_status(tcg_env);
/* Load a 32-bit single-precision value from @addr and widen it to the
 * 64-bit double representation via the todouble helper. */
729 static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
731 TCGv_i32 tmp = tcg_temp_new_i32();
732 tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
733 gen_helper_todouble(dest, tmp);
736 /* lfdepx (external PID lfdx) */
/* 64-bit FP load through the external-PID address space: uses the
 * fixed PPC_TLB_EPID_LOAD mmu index instead of ctx->mem_idx. */
737 static void gen_lfdepx(DisasContext *ctx)
742 if (unlikely(!ctx->fpu_enabled)) {
743 gen_exception(ctx, POWERPC_EXCP_FPU);
746 gen_set_access_type(ctx, ACCESS_FLOAT);
748 t0 = tcg_temp_new_i64();
749 gen_addr_reg_index(ctx, EA);
750 tcg_gen_qemu_ld_i64(t0, EA, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UQ));
751 set_fpr(rD(ctx->opcode), t0);
/* lfdp: load an FP register pair (frD, frD+1) from EA.  In LE mode the
 * two doublewords are loaded in swapped register order; the 8-byte
 * loads themselves are byte-swapped by gen_qemu_ld64_i64 already. */
755 static void gen_lfdp(DisasContext *ctx)
759 if (unlikely(!ctx->fpu_enabled)) {
760 gen_exception(ctx, POWERPC_EXCP_FPU);
763 gen_set_access_type(ctx, ACCESS_FLOAT);
765 gen_addr_imm_index(ctx, EA, 0);
766 t0 = tcg_temp_new_i64();
768 * We only need to swap high and low halves. gen_qemu_ld64_i64
769 * does necessary 64-bit byteswap already.
771 if (unlikely(ctx->le_mode)) {
772 gen_qemu_ld64_i64(ctx, t0, EA);
773 set_fpr(rD(ctx->opcode) + 1, t0);
774 tcg_gen_addi_tl(EA, EA, 8);
775 gen_qemu_ld64_i64(ctx, t0, EA);
776 set_fpr(rD(ctx->opcode), t0);
778 gen_qemu_ld64_i64(ctx, t0, EA);
779 set_fpr(rD(ctx->opcode), t0);
780 tcg_gen_addi_tl(EA, EA, 8);
781 gen_qemu_ld64_i64(ctx, t0, EA);
782 set_fpr(rD(ctx->opcode) + 1, t0);
/* lfdpx: indexed form of lfdp — identical pair-load logic, with EA
 * computed from registers (gen_addr_reg_index) instead of an
 * immediate displacement. */
787 static void gen_lfdpx(DisasContext *ctx)
791 if (unlikely(!ctx->fpu_enabled)) {
792 gen_exception(ctx, POWERPC_EXCP_FPU);
795 gen_set_access_type(ctx, ACCESS_FLOAT);
797 gen_addr_reg_index(ctx, EA);
798 t0 = tcg_temp_new_i64();
800 * We only need to swap high and low halves. gen_qemu_ld64_i64
801 * does necessary 64-bit byteswap already.
803 if (unlikely(ctx->le_mode)) {
804 gen_qemu_ld64_i64(ctx, t0, EA);
805 set_fpr(rD(ctx->opcode) + 1, t0);
806 tcg_gen_addi_tl(EA, EA, 8);
807 gen_qemu_ld64_i64(ctx, t0, EA);
808 set_fpr(rD(ctx->opcode), t0);
810 gen_qemu_ld64_i64(ctx, t0, EA);
811 set_fpr(rD(ctx->opcode), t0);
812 tcg_gen_addi_tl(EA, EA, 8);
813 gen_qemu_ld64_i64(ctx, t0, EA);
814 set_fpr(rD(ctx->opcode) + 1, t0);
/* lfiwax: load a 32-bit word, sign-extend it to 64 bits
 * (tcg_gen_ext_tl_i64 after a signed 32-bit load) and place in frD. */
819 static void gen_lfiwax(DisasContext *ctx)
824 if (unlikely(!ctx->fpu_enabled)) {
825 gen_exception(ctx, POWERPC_EXCP_FPU);
828 gen_set_access_type(ctx, ACCESS_FLOAT);
831 t1 = tcg_temp_new_i64();
832 gen_addr_reg_index(ctx, EA);
833 gen_qemu_ld32s(ctx, t0, EA);
834 tcg_gen_ext_tl_i64(t1, t0);
835 set_fpr(rD(ctx->opcode), t1);
/* lfiwzx: load a 32-bit word zero-extended to 64 bits into frD. */
839 static void gen_lfiwzx(DisasContext *ctx)
843 if (unlikely(!ctx->fpu_enabled)) {
844 gen_exception(ctx, POWERPC_EXCP_FPU);
847 gen_set_access_type(ctx, ACCESS_FLOAT);
849 t0 = tcg_temp_new_i64();
850 gen_addr_reg_index(ctx, EA);
851 gen_qemu_ld32u_i64(ctx, t0, EA);
852 set_fpr(rD(ctx->opcode), t0);
/* Generator macro for indexed FP store instructions: reads frS and
 * emits the given gen_qemu_<stop> store at the register-indexed EA.
 * (Comments kept outside the macro so line continuations survive.) */
855 #define GEN_STXF(name, stop, opc2, opc3, type) \
856 static void glue(gen_, name##x)(DisasContext *ctx) \
860 if (unlikely(!ctx->fpu_enabled)) { \
861 gen_exception(ctx, POWERPC_EXCP_FPU); \
864 gen_set_access_type(ctx, ACCESS_FLOAT); \
865 EA = tcg_temp_new(); \
866 t0 = tcg_temp_new_i64(); \
867 gen_addr_reg_index(ctx, EA); \
868 get_fpr(t0, rS(ctx->opcode)); \
869 gen_qemu_##stop(ctx, t0, EA); \
/* Narrow the 64-bit double in @src to single precision via the
 * tosingle helper, then store the 32-bit result at @addr. */
872 static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
874 TCGv_i32 tmp = tcg_temp_new_i32();
875 gen_helper_tosingle(tmp, src);
876 tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
879 /* stfdepx (external PID lfdx) */
/* 64-bit FP store through the external-PID address space, using the
 * fixed PPC_TLB_EPID_STORE mmu index. */
880 static void gen_stfdepx(DisasContext *ctx)
885 if (unlikely(!ctx->fpu_enabled)) {
886 gen_exception(ctx, POWERPC_EXCP_FPU);
889 gen_set_access_type(ctx, ACCESS_FLOAT);
891 t0 = tcg_temp_new_i64();
892 gen_addr_reg_index(ctx, EA);
893 get_fpr(t0, rD(ctx->opcode));
894 tcg_gen_qemu_st_i64(t0, EA, PPC_TLB_EPID_STORE, DEF_MEMOP(MO_UQ));
/* stfdp: store an FP register pair (frD, frD+1) to EA; mirrors
 * gen_lfdp — in LE mode the two registers are stored in swapped
 * order, while the 8-byte stores handle their own byteswap. */
898 static void gen_stfdp(DisasContext *ctx)
902 if (unlikely(!ctx->fpu_enabled)) {
903 gen_exception(ctx, POWERPC_EXCP_FPU);
906 gen_set_access_type(ctx, ACCESS_FLOAT);
908 t0 = tcg_temp_new_i64();
909 gen_addr_imm_index(ctx, EA, 0);
911 * We only need to swap high and low halves. gen_qemu_st64_i64
912 * does necessary 64-bit byteswap already.
914 if (unlikely(ctx->le_mode)) {
915 get_fpr(t0, rD(ctx->opcode) + 1);
916 gen_qemu_st64_i64(ctx, t0, EA);
917 tcg_gen_addi_tl(EA, EA, 8);
918 get_fpr(t0, rD(ctx->opcode));
919 gen_qemu_st64_i64(ctx, t0, EA);
921 get_fpr(t0, rD(ctx->opcode));
922 gen_qemu_st64_i64(ctx, t0, EA);
923 tcg_gen_addi_tl(EA, EA, 8);
924 get_fpr(t0, rD(ctx->opcode) + 1);
925 gen_qemu_st64_i64(ctx, t0, EA);
/* stfdpx: indexed form of stfdp — same pair-store logic with a
 * register-indexed EA. */
930 static void gen_stfdpx(DisasContext *ctx)
934 if (unlikely(!ctx->fpu_enabled)) {
935 gen_exception(ctx, POWERPC_EXCP_FPU);
938 gen_set_access_type(ctx, ACCESS_FLOAT);
940 t0 = tcg_temp_new_i64();
941 gen_addr_reg_index(ctx, EA);
943 * We only need to swap high and low halves. gen_qemu_st64_i64
944 * does necessary 64-bit byteswap already.
946 if (unlikely(ctx->le_mode)) {
947 get_fpr(t0, rD(ctx->opcode) + 1);
948 gen_qemu_st64_i64(ctx, t0, EA);
949 tcg_gen_addi_tl(EA, EA, 8);
950 get_fpr(t0, rD(ctx->opcode));
951 gen_qemu_st64_i64(ctx, t0, EA);
953 get_fpr(t0, rD(ctx->opcode));
954 gen_qemu_st64_i64(ctx, t0, EA);
955 tcg_gen_addi_tl(EA, EA, 8);
956 get_fpr(t0, rD(ctx->opcode) + 1);
957 gen_qemu_st64_i64(ctx, t0, EA);
/* Store the low 32 bits of the FP register as an integer word (the
 * stfiwx semantics): truncate i64 -> target long, then 32-bit store.
 * NOTE(review): line 965 ends with a comma operator rather than a
 * semicolon — behaviorally equivalent here, but a ';' was almost
 * certainly intended. */
962 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
964 TCGv t0 = tcg_temp_new();
965 tcg_gen_trunc_i64_tl(t0, arg1),
966 gen_qemu_st32(ctx, t0, arg2);
/* Instantiate gen_stfiwx via the GEN_STXF store-generator macro. */
969 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
971 /* Floating-point Load/Store Instructions */
/* Common translator for all FP load/store forms:
 *   @update - write the computed EA back to GPR ra afterwards
 *   @store  - store frS instead of loading frT
 *   @single - 32-bit single-precision access (with convert) vs 64-bit
 * update with ra == 0 is rejected (invalid form; the error path is
 * outside this visible fragment). */
972 static bool do_lsfpsd(DisasContext *ctx, int rt, int ra, TCGv displ,
973 bool update, bool store, bool single)
977 REQUIRE_INSNS_FLAGS(ctx, FLOAT);
979 if (update && ra == 0) {
983 gen_set_access_type(ctx, ACCESS_FLOAT);
984 t0 = tcg_temp_new_i64();
985 ea = do_ea_calc(ctx, ra, displ);
989 gen_qemu_st32fs(ctx, t0, ea);
991 gen_qemu_st64_i64(ctx, t0, ea);
995 gen_qemu_ld32fs(ctx, t0, ea);
997 gen_qemu_ld64_i64(ctx, t0, ea);
/* Update form: write the effective address back into GPR ra. */
1002 tcg_gen_mov_tl(cpu_gpr[ra], ea);
/* D-form FP load/store: immediate displacement a->si as a constant. */
1007 static bool do_lsfp_D(DisasContext *ctx, arg_D *a, bool update, bool store,
1010 return do_lsfpsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), update, store,
/* Prefixed (PLS_D) FP load/store: resolve the prefixed displacement
 * into a plain D-form argument, then reuse do_lsfp_D. */
1014 static bool do_lsfp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool update,
1015 bool store, bool single)
1018 if (!resolve_PLS_D(ctx, &d, a)) {
1021 return do_lsfp_D(ctx, &d, update, store, single);
/* X-form FP load/store: displacement comes from GPR rb. */
1024 static bool do_lsfp_X(DisasContext *ctx, arg_X *a, bool update,
1025 bool store, bool single)
1027 return do_lsfpsd(ctx, a->rt, a->ra, cpu_gpr[a->rb], update, store, single);
/* Decodetree bindings for FP load/store: the three booleans are
 * (update, store, single) as consumed by do_lsfpsd. */
1030 TRANS(LFS, do_lsfp_D, false, false, true)
1031 TRANS(LFSU, do_lsfp_D, true, false, true)
1032 TRANS(LFSX, do_lsfp_X, false, false, true)
1033 TRANS(LFSUX, do_lsfp_X, true, false, true)
1034 TRANS(PLFS, do_lsfp_PLS_D, false, false, true)
1036 TRANS(LFD, do_lsfp_D, false, false, false)
1037 TRANS(LFDU, do_lsfp_D, true, false, false)
1038 TRANS(LFDX, do_lsfp_X, false, false, false)
1039 TRANS(LFDUX, do_lsfp_X, true, false, false)
1040 TRANS(PLFD, do_lsfp_PLS_D, false, false, false)
1042 TRANS(STFS, do_lsfp_D, false, true, true)
1043 TRANS(STFSU, do_lsfp_D, true, true, true)
1044 TRANS(STFSX, do_lsfp_X, false, true, true)
1045 TRANS(STFSUX, do_lsfp_X, true, true, true)
1046 TRANS(PSTFS, do_lsfp_PLS_D, false, true, true)
1048 TRANS(STFD, do_lsfp_D, false, true, false)
1049 TRANS(STFDU, do_lsfp_D, true, true, false)
1050 TRANS(STFDX, do_lsfp_X, false, true, false)
1051 TRANS(STFDUX, do_lsfp_X, true, true, false)
1052 TRANS(PSTFD, do_lsfp_PLS_D, false, true, false)