4 * Standard FPU translation
/* Reset the env's accumulated float status before emitting an FP op. */
7 static inline void gen_reset_fpstatus(void)
9 gen_helper_reset_fpstatus(cpu_env);
/*
 * Update FPSCR.FPRF from the float64 result in @arg, then let the
 * status-check helper raise any newly pending FP exception.
 */
12 static inline void gen_compute_fprf_float64(TCGv_i64 arg)
14 gen_helper_compute_fprf_float64(cpu_env, arg);
15 gen_helper_float_check_status(cpu_env);
/*
 * Copy the top nibble of FPSCR into CR1, as Rc=1 FP instructions
 * require.  On 64-bit targets FPSCR is target-long sized, so it is
 * truncated to 32 bits before the shift; the 32-bit variant shifts
 * the target-long value directly.
 */
18 #if defined(TARGET_PPC64)
19 static void gen_set_cr1_from_fpscr(DisasContext *ctx)
21 TCGv_i32 tmp = tcg_temp_new_i32();
22 tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
23 tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
24 tcg_temp_free_i32(tmp);
27 static void gen_set_cr1_from_fpscr(DisasContext *ctx)
29 tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
33 /*** Floating-Point arithmetic ***/
/*
 * Emit a three-operand FP op: fD = f<name>(fA, fC, fB) (the fmadd
 * family operand order).  Checks MSR.FP first, resets float status,
 * calls the per-op helper, writes fD, and — per macro arguments —
 * updates FPSCR.FPRF and sets CR1 when Rc=1.
 * NOTE(review): set_fprf/type guard lines appear elided in this view.
 */
34 #define _GEN_FLOAT_ACB(name, op1, op2, set_fprf, type) \
35 static void gen_f##name(DisasContext *ctx) \
41 if (unlikely(!ctx->fpu_enabled)) { \
42 gen_exception(ctx, POWERPC_EXCP_FPU); \
45 t0 = tcg_temp_new_i64(); \
46 t1 = tcg_temp_new_i64(); \
47 t2 = tcg_temp_new_i64(); \
48 t3 = tcg_temp_new_i64(); \
49 gen_reset_fpstatus(); \
50 get_fpr(t0, rA(ctx->opcode)); \
51 get_fpr(t1, rC(ctx->opcode)); \
52 get_fpr(t2, rB(ctx->opcode)); \
53 gen_helper_f##name(t3, cpu_env, t0, t1, t2); \
54 set_fpr(rD(ctx->opcode), t3); \
56 gen_compute_fprf_float64(t3); \
58 if (unlikely(Rc(ctx->opcode) != 0)) { \
59 gen_set_cr1_from_fpscr(ctx); \
61 tcg_temp_free_i64(t0); \
62 tcg_temp_free_i64(t1); \
63 tcg_temp_free_i64(t2); \
64 tcg_temp_free_i64(t3); \
/* Instantiate double- (opc 0x3F) and single-precision (0x3B) variants. */
67 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
68 _GEN_FLOAT_ACB(name, 0x3F, op2, set_fprf, type); \
69 _GEN_FLOAT_ACB(name##s, 0x3B, op2, set_fprf, type);
/*
 * Emit a two-operand FP op: fD = f<op>(fA, fB).  When @isfloat is set
 * (single-precision forms) the double result is rounded to single via
 * gen_helper_frsp before being written back.
 * NOTE(review): isfloat/set_fprf guard lines appear elided in this view.
 */
71 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
72 static void gen_f##name(DisasContext *ctx) \
77 if (unlikely(!ctx->fpu_enabled)) { \
78 gen_exception(ctx, POWERPC_EXCP_FPU); \
81 t0 = tcg_temp_new_i64(); \
82 t1 = tcg_temp_new_i64(); \
83 t2 = tcg_temp_new_i64(); \
84 gen_reset_fpstatus(); \
85 get_fpr(t0, rA(ctx->opcode)); \
86 get_fpr(t1, rB(ctx->opcode)); \
87 gen_helper_f##op(t2, cpu_env, t0, t1); \
89 gen_helper_frsp(t2, cpu_env, t2); \
91 set_fpr(rD(ctx->opcode), t2); \
93 gen_compute_fprf_float64(t2); \
95 if (unlikely(Rc(ctx->opcode) != 0)) { \
96 gen_set_cr1_from_fpscr(ctx); \
98 tcg_temp_free_i64(t0); \
99 tcg_temp_free_i64(t1); \
100 tcg_temp_free_i64(t2); \
/* Double (0x3F, isfloat=0) and single (0x3B, isfloat=1) instantiations. */
102 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
103 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
104 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/*
 * Emit fD = f<op>(fA, fC) — the fmul form, which takes its second
 * source from the C field instead of B.  Otherwise identical in
 * structure to _GEN_FLOAT_AB (optional frsp rounding, FPRF, CR1).
 */
106 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
107 static void gen_f##name(DisasContext *ctx) \
112 if (unlikely(!ctx->fpu_enabled)) { \
113 gen_exception(ctx, POWERPC_EXCP_FPU); \
116 t0 = tcg_temp_new_i64(); \
117 t1 = tcg_temp_new_i64(); \
118 t2 = tcg_temp_new_i64(); \
119 gen_reset_fpstatus(); \
120 get_fpr(t0, rA(ctx->opcode)); \
121 get_fpr(t1, rC(ctx->opcode)); \
122 gen_helper_f##op(t2, cpu_env, t0, t1); \
124 gen_helper_frsp(t2, cpu_env, t2); \
126 set_fpr(rD(ctx->opcode), t2); \
128 gen_compute_fprf_float64(t2); \
130 if (unlikely(Rc(ctx->opcode) != 0)) { \
131 gen_set_cr1_from_fpscr(ctx); \
133 tcg_temp_free_i64(t0); \
134 tcg_temp_free_i64(t1); \
135 tcg_temp_free_i64(t2); \
/* Double (0x3F, isfloat=0) and single (0x3B, isfloat=1) instantiations. */
137 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
138 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
139 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/*
 * Emit a single-source FP op: fD = f<name>(fB) (round/convert family).
 * Calls the FPRF and status-check helpers directly rather than via
 * gen_compute_fprf_float64.
 * NOTE(review): set_fprf guard lines appear elided in this view.
 */
141 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
142 static void gen_f##name(DisasContext *ctx) \
146 if (unlikely(!ctx->fpu_enabled)) { \
147 gen_exception(ctx, POWERPC_EXCP_FPU); \
150 t0 = tcg_temp_new_i64(); \
151 t1 = tcg_temp_new_i64(); \
152 gen_reset_fpstatus(); \
153 get_fpr(t0, rB(ctx->opcode)); \
154 gen_helper_f##name(t1, cpu_env, t0); \
155 set_fpr(rD(ctx->opcode), t1); \
157 gen_helper_compute_fprf_float64(cpu_env, t1); \
159 gen_helper_float_check_status(cpu_env); \
160 if (unlikely(Rc(ctx->opcode) != 0)) { \
161 gen_set_cr1_from_fpscr(ctx); \
163 tcg_temp_free_i64(t0); \
164 tcg_temp_free_i64(t1); \
/*
 * Emit a single-source estimate op: fD = f<name>(fB) (fre/fres/
 * frsqrte family), with optional FPRF update and CR1 on Rc=1.
 */
167 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
168 static void gen_f##name(DisasContext *ctx) \
172 if (unlikely(!ctx->fpu_enabled)) { \
173 gen_exception(ctx, POWERPC_EXCP_FPU); \
176 t0 = tcg_temp_new_i64(); \
177 t1 = tcg_temp_new_i64(); \
178 gen_reset_fpstatus(); \
179 get_fpr(t0, rB(ctx->opcode)); \
180 gen_helper_f##name(t1, cpu_env, t0); \
181 set_fpr(rD(ctx->opcode), t1); \
183 gen_compute_fprf_float64(t1); \
185 if (unlikely(Rc(ctx->opcode) != 0)) { \
186 gen_set_cr1_from_fpscr(ctx); \
188 tcg_temp_free_i64(t0); \
189 tcg_temp_free_i64(t1); \
/* fadd - fadds */
193 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
195 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
197 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
/* fre */
200 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
/* fres */
203 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
/* frsqrte */
206 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
/*
 * frsqrtes: single-precision reciprocal square-root estimate.
 * Computes the double-precision estimate with the frsqrte helper,
 * then rounds to single with frsp before writing fD.
 */
209 static void gen_frsqrtes(DisasContext *ctx)
213 if (unlikely(!ctx->fpu_enabled)) {
214 gen_exception(ctx, POWERPC_EXCP_FPU);
217 t0 = tcg_temp_new_i64();
218 t1 = tcg_temp_new_i64();
219 gen_reset_fpstatus();
220 get_fpr(t0, rB(ctx->opcode));
221 gen_helper_frsqrte(t1, cpu_env, t0);
222 gen_helper_frsp(t1, cpu_env, t1);
223 set_fpr(rD(ctx->opcode), t1);
224 gen_compute_fprf_float64(t1);
225 if (unlikely(Rc(ctx->opcode) != 0)) {
226 gen_set_cr1_from_fpscr(ctx);
228 tcg_temp_free_i64(t0);
229 tcg_temp_free_i64(t1);
/* fsel — no FPRF update (set_fprf = 0) */
233 _GEN_FLOAT_ACB(sel, 0x3F, 0x17, 0, PPC_FLOAT_FSEL);
/* fsub - fsubs */
235 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* fsqrt: double-precision square root, fD = sqrt(fB). */
239 static void gen_fsqrt(DisasContext *ctx)
243 if (unlikely(!ctx->fpu_enabled)) {
244 gen_exception(ctx, POWERPC_EXCP_FPU);
247 t0 = tcg_temp_new_i64();
248 t1 = tcg_temp_new_i64();
249 gen_reset_fpstatus();
250 get_fpr(t0, rB(ctx->opcode));
251 gen_helper_fsqrt(t1, cpu_env, t0);
252 set_fpr(rD(ctx->opcode), t1);
253 gen_compute_fprf_float64(t1);
254 if (unlikely(Rc(ctx->opcode) != 0)) {
255 gen_set_cr1_from_fpscr(ctx);
257 tcg_temp_free_i64(t0);
258 tcg_temp_free_i64(t1);
/* fsqrts: square root rounded to single precision (fsqrt + frsp). */
261 static void gen_fsqrts(DisasContext *ctx)
265 if (unlikely(!ctx->fpu_enabled)) {
266 gen_exception(ctx, POWERPC_EXCP_FPU);
269 t0 = tcg_temp_new_i64();
270 t1 = tcg_temp_new_i64();
271 gen_reset_fpstatus();
272 get_fpr(t0, rB(ctx->opcode));
273 gen_helper_fsqrt(t1, cpu_env, t0);
274 gen_helper_frsp(t1, cpu_env, t1);
275 set_fpr(rD(ctx->opcode), t1);
276 gen_compute_fprf_float64(t1);
277 if (unlikely(Rc(ctx->opcode) != 0)) {
278 gen_set_cr1_from_fpscr(ctx);
280 tcg_temp_free_i64(t0);
281 tcg_temp_free_i64(t1);
284 /*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
286 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
288 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
289 /* fnmadd - fnmadds */
290 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
291 /* fnmsub - fnmsubs */
292 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
294 /*** Floating-Point round & convert ***/
/* fctiw */
296 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwu */
298 GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
/* fctiwz */
300 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* fctiwuz */
302 GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
/* frsp */
304 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
/* fcfid */
306 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
/* fcfids */
308 GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
/* fcfidu */
310 GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fcfidus */
312 GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fctid */
314 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
/* fctidu */
316 GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* fctidz */
318 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
/* fctiduz */
320 GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* frin */
323 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
325 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
327 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
329 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
/* ftdiv: test-for-software-divide; writes result flags into CRF[crfD]. */
331 static void gen_ftdiv(DisasContext *ctx)
335 if (unlikely(!ctx->fpu_enabled)) {
336 gen_exception(ctx, POWERPC_EXCP_FPU);
339 t0 = tcg_temp_new_i64();
340 t1 = tcg_temp_new_i64();
341 get_fpr(t0, rA(ctx->opcode));
342 get_fpr(t1, rB(ctx->opcode));
343 gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], t0, t1);
344 tcg_temp_free_i64(t0);
345 tcg_temp_free_i64(t1);
/* ftsqrt: test-for-software-sqrt on fB; writes flags into CRF[crfD]. */
348 static void gen_ftsqrt(DisasContext *ctx)
351 if (unlikely(!ctx->fpu_enabled)) {
352 gen_exception(ctx, POWERPC_EXCP_FPU);
355 t0 = tcg_temp_new_i64();
356 get_fpr(t0, rB(ctx->opcode));
357 gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], t0);
358 tcg_temp_free_i64(t0);
363 /*** Floating-Point compare ***/
/*
 * fcmpo: ordered compare of fA and fB into CRF[crfD].  The helper
 * signals invalid-operation for NaN operands; float_check_status
 * then raises any pending exception.
 */
366 static void gen_fcmpo(DisasContext *ctx)
371 if (unlikely(!ctx->fpu_enabled)) {
372 gen_exception(ctx, POWERPC_EXCP_FPU);
375 t0 = tcg_temp_new_i64();
376 t1 = tcg_temp_new_i64();
377 gen_reset_fpstatus();
378 crf = tcg_const_i32(crfD(ctx->opcode));
379 get_fpr(t0, rA(ctx->opcode));
380 get_fpr(t1, rB(ctx->opcode));
381 gen_helper_fcmpo(cpu_env, t0, t1, crf);
382 tcg_temp_free_i32(crf);
383 gen_helper_float_check_status(cpu_env);
384 tcg_temp_free_i64(t0);
385 tcg_temp_free_i64(t1);
/* fcmpu: unordered compare of fA and fB into CRF[crfD]. */
389 static void gen_fcmpu(DisasContext *ctx)
394 if (unlikely(!ctx->fpu_enabled)) {
395 gen_exception(ctx, POWERPC_EXCP_FPU);
398 t0 = tcg_temp_new_i64();
399 t1 = tcg_temp_new_i64();
400 gen_reset_fpstatus();
401 crf = tcg_const_i32(crfD(ctx->opcode));
402 get_fpr(t0, rA(ctx->opcode));
403 get_fpr(t1, rB(ctx->opcode));
404 gen_helper_fcmpu(cpu_env, t0, t1, crf);
405 tcg_temp_free_i32(crf);
406 gen_helper_float_check_status(cpu_env);
407 tcg_temp_free_i64(t0);
408 tcg_temp_free_i64(t1);
411 /*** Floating-point move ***/
413 /* XXX: beware that fabs never checks for NaNs nor update FPSCR */
/* fabs: fD = |fB| — clears the IEEE sign bit (bit 63) only. */
414 static void gen_fabs(DisasContext *ctx)
418 if (unlikely(!ctx->fpu_enabled)) {
419 gen_exception(ctx, POWERPC_EXCP_FPU);
422 t0 = tcg_temp_new_i64();
423 t1 = tcg_temp_new_i64();
424 get_fpr(t0, rB(ctx->opcode));
425 tcg_gen_andi_i64(t1, t0, ~(1ULL << 63));
426 set_fpr(rD(ctx->opcode), t1);
427 if (unlikely(Rc(ctx->opcode))) {
428 gen_set_cr1_from_fpscr(ctx);
430 tcg_temp_free_i64(t0);
431 tcg_temp_free_i64(t1);
435 /* XXX: beware that fmr never checks for NaNs nor update FPSCR */
/* fmr: fD = fB — raw register copy, no rounding or status update. */
436 static void gen_fmr(DisasContext *ctx)
439 if (unlikely(!ctx->fpu_enabled)) {
440 gen_exception(ctx, POWERPC_EXCP_FPU);
443 t0 = tcg_temp_new_i64();
444 get_fpr(t0, rB(ctx->opcode));
445 set_fpr(rD(ctx->opcode), t0);
446 if (unlikely(Rc(ctx->opcode))) {
447 gen_set_cr1_from_fpscr(ctx);
449 tcg_temp_free_i64(t0);
453 /* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
/* fnabs: fD = -|fB| — forces the sign bit (bit 63) on. */
454 static void gen_fnabs(DisasContext *ctx)
458 if (unlikely(!ctx->fpu_enabled)) {
459 gen_exception(ctx, POWERPC_EXCP_FPU);
462 t0 = tcg_temp_new_i64();
463 t1 = tcg_temp_new_i64();
464 get_fpr(t0, rB(ctx->opcode));
465 tcg_gen_ori_i64(t1, t0, 1ULL << 63);
466 set_fpr(rD(ctx->opcode), t1);
467 if (unlikely(Rc(ctx->opcode))) {
468 gen_set_cr1_from_fpscr(ctx);
470 tcg_temp_free_i64(t0);
471 tcg_temp_free_i64(t1);
475 /* XXX: beware that fneg never checks for NaNs nor update FPSCR */
/* fneg: fD = -fB — flips the sign bit (bit 63) only. */
476 static void gen_fneg(DisasContext *ctx)
480 if (unlikely(!ctx->fpu_enabled)) {
481 gen_exception(ctx, POWERPC_EXCP_FPU);
484 t0 = tcg_temp_new_i64();
485 t1 = tcg_temp_new_i64();
486 get_fpr(t0, rB(ctx->opcode));
487 tcg_gen_xori_i64(t1, t0, 1ULL << 63);
488 set_fpr(rD(ctx->opcode), t1);
489 if (unlikely(Rc(ctx->opcode))) {
490 gen_set_cr1_from_fpscr(ctx);
492 tcg_temp_free_i64(t0);
493 tcg_temp_free_i64(t1);
496 /* fcpsgn: PowerPC 2.05 specification */
497 /* XXX: beware that fcpsgn never checks for NaNs nor update FPSCR */
/*
 * fcpsgn: fD takes the sign of fA and the magnitude of fB.
 * deposit(t2, fA, fB, 0, 63) copies fB's low 63 bits into fA's value,
 * keeping only fA's bit 63 (the sign).
 */
498 static void gen_fcpsgn(DisasContext *ctx)
503 if (unlikely(!ctx->fpu_enabled)) {
504 gen_exception(ctx, POWERPC_EXCP_FPU);
507 t0 = tcg_temp_new_i64();
508 t1 = tcg_temp_new_i64();
509 t2 = tcg_temp_new_i64();
510 get_fpr(t0, rA(ctx->opcode));
511 get_fpr(t1, rB(ctx->opcode));
512 tcg_gen_deposit_i64(t2, t0, t1, 0, 63);
513 set_fpr(rD(ctx->opcode), t2);
514 if (unlikely(Rc(ctx->opcode))) {
515 gen_set_cr1_from_fpscr(ctx);
517 tcg_temp_free_i64(t0);
518 tcg_temp_free_i64(t1);
519 tcg_temp_free_i64(t2);
/*
 * fmrgew: merge even words — fD = fA[0:31] || fB[0:31].
 * b0 holds fB's high word; it is deposited into the low 32 bits of
 * fA's value.
 */
522 static void gen_fmrgew(DisasContext *ctx)
527 if (unlikely(!ctx->fpu_enabled)) {
528 gen_exception(ctx, POWERPC_EXCP_FPU);
531 b0 = tcg_temp_new_i64();
532 t0 = tcg_temp_new_i64();
533 t1 = tcg_temp_new_i64();
534 get_fpr(t0, rB(ctx->opcode));
535 tcg_gen_shri_i64(b0, t0, 32);
536 get_fpr(t0, rA(ctx->opcode));
537 tcg_gen_deposit_i64(t1, t0, b0, 0, 32);
538 set_fpr(rD(ctx->opcode), t1);
539 tcg_temp_free_i64(b0);
540 tcg_temp_free_i64(t0);
541 tcg_temp_free_i64(t1);
/*
 * fmrgow: merge odd words — fD = fA[32:63] || fB[32:63].
 * fA's low word is deposited into the high 32 bits of fB's value.
 */
544 static void gen_fmrgow(DisasContext *ctx)
549 if (unlikely(!ctx->fpu_enabled)) {
550 gen_exception(ctx, POWERPC_EXCP_FPU);
553 t0 = tcg_temp_new_i64();
554 t1 = tcg_temp_new_i64();
555 t2 = tcg_temp_new_i64();
556 get_fpr(t0, rB(ctx->opcode));
557 get_fpr(t1, rA(ctx->opcode));
558 tcg_gen_deposit_i64(t2, t0, t1, 32, 32);
559 set_fpr(rD(ctx->opcode), t2);
560 tcg_temp_free_i64(t0);
561 tcg_temp_free_i64(t1);
562 tcg_temp_free_i64(t2);
565 /*** Floating-Point status & ctrl register ***/
/*
 * mcrfs: copy FPSCR field bfa into CRF[crfD], then clear the copied
 * exception bits in FPSCR.  Clearing goes through the store_fpscr
 * helper (with a one-nibble mask) so that FEX and VX are recomputed
 * rather than written directly.
 */
568 static void gen_mcrfs(DisasContext *ctx)
570 TCGv tmp = tcg_temp_new();
572 TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
577 if (unlikely(!ctx->fpu_enabled)) {
578 gen_exception(ctx, POWERPC_EXCP_FPU);
581 bfa = crfS(ctx->opcode);
584 tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
585 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
586 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],
589 tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
590 /* Only the exception bits (including FX) should be cleared if read */
591 tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
592 ~((0xF << shift) & FP_EX_CLEAR_BITS));
593 /* FEX and VX need to be updated, so don't set fpscr directly */
594 tmask = tcg_const_i32(1 << nibble);
595 gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
596 tcg_temp_free_i32(tmask);
597 tcg_temp_free_i64(tnew_fpscr);
/* mffs: copy the whole FPSCR (zero-extended to 64 bits) into fD. */
601 static void gen_mffs(DisasContext *ctx)
604 if (unlikely(!ctx->fpu_enabled)) {
605 gen_exception(ctx, POWERPC_EXCP_FPU);
608 t0 = tcg_temp_new_i64();
609 gen_reset_fpstatus();
610 tcg_gen_extu_tl_i64(t0, cpu_fpscr);
611 set_fpr(rD(ctx->opcode), t0);
612 if (unlikely(Rc(ctx->opcode))) {
613 gen_set_cr1_from_fpscr(ctx);
615 tcg_temp_free_i64(t0);
/*
 * mffsl (ISA 3.0): like mffs but masks the copy down to the DRN,
 * status, enable, and RN fields.  Pre-ISA300 CPUs decode this
 * encoding as plain mffs, hence the fallback.
 */
619 static void gen_mffsl(DisasContext *ctx)
623 if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
624 return gen_mffs(ctx);
627 if (unlikely(!ctx->fpu_enabled)) {
628 gen_exception(ctx, POWERPC_EXCP_FPU);
631 t0 = tcg_temp_new_i64();
632 gen_reset_fpstatus();
633 tcg_gen_extu_tl_i64(t0, cpu_fpscr);
634 /* Mask everything except mode, status, and enables. */
635 tcg_gen_andi_i64(t0, t0, FP_DRN | FP_STATUS | FP_ENABLES | FP_RN);
636 set_fpr(rD(ctx->opcode), t0);
637 tcg_temp_free_i64(t0);
/*
 * mffsce (ISA 3.0): read FPSCR into fD, then clear the exception
 * enable bits in FPSCR via the store helper.  Falls back to mffs on
 * pre-ISA300 CPUs.
 */
641 static void gen_mffsce(DisasContext *ctx)
646 if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
647 return gen_mffs(ctx);
650 if (unlikely(!ctx->fpu_enabled)) {
651 gen_exception(ctx, POWERPC_EXCP_FPU);
655 t0 = tcg_temp_new_i64();
657 gen_reset_fpstatus();
658 tcg_gen_extu_tl_i64(t0, cpu_fpscr);
659 set_fpr(rD(ctx->opcode), t0);
661 /* Clear exception enable bits in the FPSCR. */
662 tcg_gen_andi_i64(t0, t0, ~FP_ENABLES);
663 mask = tcg_const_i32(0x0003);
664 gen_helper_store_fpscr(cpu_env, t0, mask);
666 tcg_temp_free_i32(mask);
667 tcg_temp_free_i64(t0);
/*
 * Common tail for mffscrn/mffscrni: write the masked FPSCR (DRN,
 * enables, RN) to fD, then replace FPSCR.RN with the value in @t1
 * (caller pre-masks t1 to the RN bits) via the store helper.
 */
670 static void gen_helper_mffscrn(DisasContext *ctx, TCGv_i64 t1)
672 TCGv_i64 t0 = tcg_temp_new_i64();
673 TCGv_i32 mask = tcg_const_i32(0x0001);
675 gen_reset_fpstatus();
676 tcg_gen_extu_tl_i64(t0, cpu_fpscr);
677 tcg_gen_andi_i64(t0, t0, FP_DRN | FP_ENABLES | FP_RN);
678 set_fpr(rD(ctx->opcode), t0);
680 /* Mask FPSCR value to clear RN. */
681 tcg_gen_andi_i64(t0, t0, ~FP_RN);
683 /* Merge RN into FPSCR value. */
684 tcg_gen_or_i64(t0, t0, t1);
686 gen_helper_store_fpscr(cpu_env, t0, mask);
688 tcg_temp_free_i32(mask);
689 tcg_temp_free_i64(t0);
/*
 * mffscrn (ISA 3.0): move-from-FPSCR with new RN taken from fB's low
 * RN bits.  Falls back to mffs on pre-ISA300 CPUs.
 */
693 static void gen_mffscrn(DisasContext *ctx)
697 if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
698 return gen_mffs(ctx);
701 if (unlikely(!ctx->fpu_enabled)) {
702 gen_exception(ctx, POWERPC_EXCP_FPU);
706 t1 = tcg_temp_new_i64();
707 get_fpr(t1, rB(ctx->opcode));
708 /* Mask FRB to get just RN. */
709 tcg_gen_andi_i64(t1, t1, FP_RN);
711 gen_helper_mffscrn(ctx, t1);
713 tcg_temp_free_i64(t1);
/*
 * mffscrni (ISA 3.0): as mffscrn but the new RN comes from the RM
 * immediate field.  Falls back to mffs on pre-ISA300 CPUs.
 */
717 static void gen_mffscrni(DisasContext *ctx)
721 if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
722 return gen_mffs(ctx);
725 if (unlikely(!ctx->fpu_enabled)) {
726 gen_exception(ctx, POWERPC_EXCP_FPU);
730 t1 = tcg_const_i64((uint64_t)RM(ctx->opcode));
732 gen_helper_mffscrn(ctx, t1);
734 tcg_temp_free_i64(t1);
/*
 * mtfsb0: clear FPSCR bit crbD.  crb = 31 - crbD converts ISA
 * big-endian bit numbering to the host's LSB-0 numbering; FEX and VX
 * are derived bits and are never cleared directly.
 */
738 static void gen_mtfsb0(DisasContext *ctx)
742 if (unlikely(!ctx->fpu_enabled)) {
743 gen_exception(ctx, POWERPC_EXCP_FPU);
746 crb = 31 - crbD(ctx->opcode);
747 gen_reset_fpstatus();
748 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
750 t0 = tcg_const_i32(crb);
751 gen_helper_fpscr_clrbit(cpu_env, t0);
752 tcg_temp_free_i32(t0);
754 if (unlikely(Rc(ctx->opcode) != 0)) {
755 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
756 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
/*
 * mtfsb1: set FPSCR bit crbD (same bit-number conversion as mtfsb0).
 * NI is also excluded; setting an enable bit may make a previously
 * recorded exception deliverable, hence the deferred status check.
 */
761 static void gen_mtfsb1(DisasContext *ctx)
765 if (unlikely(!ctx->fpu_enabled)) {
766 gen_exception(ctx, POWERPC_EXCP_FPU);
769 crb = 31 - crbD(ctx->opcode);
770 /* XXX: we pretend we can only do IEEE floating-point computations */
771 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
773 t0 = tcg_const_i32(crb);
774 gen_helper_fpscr_setbit(cpu_env, t0);
775 tcg_temp_free_i32(t0);
777 if (unlikely(Rc(ctx->opcode) != 0)) {
778 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
779 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
781 /* We can raise a deferred exception */
782 gen_helper_fpscr_check_status(cpu_env);
/*
 * mtfsf: store fB into FPSCR under the FLM field mask.  L=1 selects
 * all fields (0xffff with ISA 2.05 DFP fields, else 0xff); the W bit
 * (ISA 2.05) selects the upper FPSCR word and is invalid earlier.
 */
786 static void gen_mtfsf(DisasContext *ctx)
792 if (unlikely(!ctx->fpu_enabled)) {
793 gen_exception(ctx, POWERPC_EXCP_FPU);
796 flm = FPFLM(ctx->opcode);
797 l = FPL(ctx->opcode);
798 w = FPW(ctx->opcode);
799 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
800 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
804 t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
806 t0 = tcg_const_i32(flm << (w * 8));
808 t1 = tcg_temp_new_i64();
809 get_fpr(t1, rB(ctx->opcode));
810 gen_helper_store_fpscr(cpu_env, t1, t0);
811 tcg_temp_free_i32(t0);
812 if (unlikely(Rc(ctx->opcode) != 0)) {
813 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
814 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
816 /* We can raise a deferred exception */
817 gen_helper_fpscr_check_status(cpu_env);
818 tcg_temp_free_i64(t1);
/*
 * mtfsfi: store the 4-bit immediate into FPSCR field BF.  sh
 * converts (W, BF) into a nibble index; the immediate is shifted
 * into place and stored under a one-field mask.
 */
822 static void gen_mtfsfi(DisasContext *ctx)
828 if (unlikely(!ctx->fpu_enabled)) {
829 gen_exception(ctx, POWERPC_EXCP_FPU);
832 w = FPW(ctx->opcode);
833 bf = FPBF(ctx->opcode);
834 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
835 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
838 sh = (8 * w) + 7 - bf;
839 t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
840 t1 = tcg_const_i32(1 << sh);
841 gen_helper_store_fpscr(cpu_env, t0, t1);
842 tcg_temp_free_i64(t0);
843 tcg_temp_free_i32(t1);
844 if (unlikely(Rc(ctx->opcode) != 0)) {
845 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
846 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
848 /* We can raise a deferred exception */
849 gen_helper_fpscr_check_status(cpu_env);
/* Load a 32-bit single from @addr and widen it to float64 in @dest. */
852 static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
854 TCGv_i32 tmp = tcg_temp_new_i32();
855 tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
856 gen_helper_todouble(dest, tmp);
857 tcg_temp_free_i32(tmp);
860 /* lfdepx (external PID lfdx) */
/* lfdepx: lfdx through the external-PID load TLB index. */
861 static void gen_lfdepx(DisasContext *ctx)
866 if (unlikely(!ctx->fpu_enabled)) {
867 gen_exception(ctx, POWERPC_EXCP_FPU);
870 gen_set_access_type(ctx, ACCESS_FLOAT);
872 t0 = tcg_temp_new_i64();
873 gen_addr_reg_index(ctx, EA);
874 tcg_gen_qemu_ld_i64(t0, EA, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_Q));
875 set_fpr(rD(ctx->opcode), t0);
877 tcg_temp_free_i64(t0);
/*
 * lfdp: load FP pair fD, fD+1 from EA and EA+8.  In little-endian
 * mode the two doublewords swap register slots; each 64-bit load
 * already handles byte order.
 */
885 static void gen_lfdp(DisasContext *ctx)
889 if (unlikely(!ctx->fpu_enabled)) {
890 gen_exception(ctx, POWERPC_EXCP_FPU);
893 gen_set_access_type(ctx, ACCESS_FLOAT);
895 gen_addr_imm_index(ctx, EA, 0);
896 t0 = tcg_temp_new_i64();
898 * We only need to swap high and low halves. gen_qemu_ld64_i64
899 * does necessary 64-bit byteswap already.
901 if (unlikely(ctx->le_mode)) {
902 gen_qemu_ld64_i64(ctx, t0, EA);
903 set_fpr(rD(ctx->opcode) + 1, t0);
904 tcg_gen_addi_tl(EA, EA, 8);
905 gen_qemu_ld64_i64(ctx, t0, EA);
906 set_fpr(rD(ctx->opcode), t0);
908 gen_qemu_ld64_i64(ctx, t0, EA);
909 set_fpr(rD(ctx->opcode), t0);
910 tcg_gen_addi_tl(EA, EA, 8);
911 gen_qemu_ld64_i64(ctx, t0, EA);
912 set_fpr(rD(ctx->opcode) + 1, t0);
915 tcg_temp_free_i64(t0);
/* lfdpx: indexed-form FP pair load; same pairing/LE-swap as lfdp. */
919 static void gen_lfdpx(DisasContext *ctx)
923 if (unlikely(!ctx->fpu_enabled)) {
924 gen_exception(ctx, POWERPC_EXCP_FPU);
927 gen_set_access_type(ctx, ACCESS_FLOAT);
929 gen_addr_reg_index(ctx, EA);
930 t0 = tcg_temp_new_i64();
932 * We only need to swap high and low halves. gen_qemu_ld64_i64
933 * does necessary 64-bit byteswap already.
935 if (unlikely(ctx->le_mode)) {
936 gen_qemu_ld64_i64(ctx, t0, EA);
937 set_fpr(rD(ctx->opcode) + 1, t0);
938 tcg_gen_addi_tl(EA, EA, 8);
939 gen_qemu_ld64_i64(ctx, t0, EA);
940 set_fpr(rD(ctx->opcode), t0);
942 gen_qemu_ld64_i64(ctx, t0, EA);
943 set_fpr(rD(ctx->opcode), t0);
944 tcg_gen_addi_tl(EA, EA, 8);
945 gen_qemu_ld64_i64(ctx, t0, EA);
946 set_fpr(rD(ctx->opcode) + 1, t0);
949 tcg_temp_free_i64(t0);
/* lfiwax: load 32-bit word, sign-extend to 64 bits into fD. */
954 static void gen_lfiwax(DisasContext *ctx)
959 if (unlikely(!ctx->fpu_enabled)) {
960 gen_exception(ctx, POWERPC_EXCP_FPU);
963 gen_set_access_type(ctx, ACCESS_FLOAT);
966 t1 = tcg_temp_new_i64();
967 gen_addr_reg_index(ctx, EA);
968 gen_qemu_ld32s(ctx, t0, EA);
969 tcg_gen_ext_tl_i64(t1, t0);
970 set_fpr(rD(ctx->opcode), t1);
973 tcg_temp_free_i64(t1);
/* lfiwzx: load 32-bit word, zero-extend to 64 bits into fD. */
976 static void gen_lfiwzx(DisasContext *ctx)
980 if (unlikely(!ctx->fpu_enabled)) {
981 gen_exception(ctx, POWERPC_EXCP_FPU);
984 gen_set_access_type(ctx, ACCESS_FLOAT);
986 t0 = tcg_temp_new_i64();
987 gen_addr_reg_index(ctx, EA);
988 gen_qemu_ld32u_i64(ctx, t0, EA);
989 set_fpr(rD(ctx->opcode), t0);
991 tcg_temp_free_i64(t0);
/*
 * Emit an indexed FP store gen_<name>x: compute EA from registers,
 * read fS, and store it via the gen_qemu_<stop> primitive.
 */
990 #define GEN_STXF(name, stop, opc2, opc3, type) \
991 static void glue(gen_, name##x)(DisasContext *ctx) \
995 if (unlikely(!ctx->fpu_enabled)) { \
996 gen_exception(ctx, POWERPC_EXCP_FPU); \
999 gen_set_access_type(ctx, ACCESS_FLOAT); \
1000 EA = tcg_temp_new(); \
1001 t0 = tcg_temp_new_i64(); \
1002 gen_addr_reg_index(ctx, EA); \
1003 get_fpr(t0, rS(ctx->opcode)); \
1004 gen_qemu_##stop(ctx, t0, EA); \
1005 tcg_temp_free(EA); \
1006 tcg_temp_free_i64(t0); \
/* Narrow the float64 in @src to single precision and store 32 bits. */
1009 static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
1011 TCGv_i32 tmp = tcg_temp_new_i32();
1012 gen_helper_tosingle(tmp, src);
1013 tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
1014 tcg_temp_free_i32(tmp);
1017 /* stfdepx (external PID lfdx) */
/* stfdepx: stfdx through the external-PID store TLB index. */
1018 static void gen_stfdepx(DisasContext *ctx)
1023 if (unlikely(!ctx->fpu_enabled)) {
1024 gen_exception(ctx, POWERPC_EXCP_FPU);
1027 gen_set_access_type(ctx, ACCESS_FLOAT);
1028 EA = tcg_temp_new();
1029 t0 = tcg_temp_new_i64();
1030 gen_addr_reg_index(ctx, EA);
1031 get_fpr(t0, rD(ctx->opcode));
1032 tcg_gen_qemu_st_i64(t0, EA, PPC_TLB_EPID_STORE, DEF_MEMOP(MO_Q));
1034 tcg_temp_free_i64(t0);
/*
 * stfdp: store FP pair fD, fD+1 to EA and EA+8, swapping the two
 * register slots in little-endian mode (mirror of lfdp).
 */
1038 static void gen_stfdp(DisasContext *ctx)
1042 if (unlikely(!ctx->fpu_enabled)) {
1043 gen_exception(ctx, POWERPC_EXCP_FPU);
1046 gen_set_access_type(ctx, ACCESS_FLOAT);
1047 EA = tcg_temp_new();
1048 t0 = tcg_temp_new_i64();
1049 gen_addr_imm_index(ctx, EA, 0);
1051 * We only need to swap high and low halves. gen_qemu_st64_i64
1052 * does necessary 64-bit byteswap already.
1054 if (unlikely(ctx->le_mode)) {
1055 get_fpr(t0, rD(ctx->opcode) + 1);
1056 gen_qemu_st64_i64(ctx, t0, EA);
1057 tcg_gen_addi_tl(EA, EA, 8);
1058 get_fpr(t0, rD(ctx->opcode));
1059 gen_qemu_st64_i64(ctx, t0, EA);
1061 get_fpr(t0, rD(ctx->opcode));
1062 gen_qemu_st64_i64(ctx, t0, EA);
1063 tcg_gen_addi_tl(EA, EA, 8);
1064 get_fpr(t0, rD(ctx->opcode) + 1);
1065 gen_qemu_st64_i64(ctx, t0, EA);
1068 tcg_temp_free_i64(t0);
/* stfdpx: indexed-form FP pair store; same pairing/LE-swap as stfdp. */
1072 static void gen_stfdpx(DisasContext *ctx)
1076 if (unlikely(!ctx->fpu_enabled)) {
1077 gen_exception(ctx, POWERPC_EXCP_FPU);
1080 gen_set_access_type(ctx, ACCESS_FLOAT);
1081 EA = tcg_temp_new();
1082 t0 = tcg_temp_new_i64();
1083 gen_addr_reg_index(ctx, EA);
1085 * We only need to swap high and low halves. gen_qemu_st64_i64
1086 * does necessary 64-bit byteswap already.
1088 if (unlikely(ctx->le_mode)) {
1089 get_fpr(t0, rD(ctx->opcode) + 1);
1090 gen_qemu_st64_i64(ctx, t0, EA);
1091 tcg_gen_addi_tl(EA, EA, 8);
1092 get_fpr(t0, rD(ctx->opcode));
1093 gen_qemu_st64_i64(ctx, t0, EA);
1095 get_fpr(t0, rD(ctx->opcode));
1096 gen_qemu_st64_i64(ctx, t0, EA);
1097 tcg_gen_addi_tl(EA, EA, 8);
1098 get_fpr(t0, rD(ctx->opcode) + 1);
1099 gen_qemu_st64_i64(ctx, t0, EA);
1102 tcg_temp_free_i64(t0);
/*
 * Store the low 32 bits of @arg1 (an FP register image) as an
 * integer word — the stfiwx primitive.
 */
1106 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
1108 TCGv t0 = tcg_temp_new();
1109 tcg_gen_trunc_i64_tl(t0, arg1),
1110 gen_qemu_st32(ctx, t0, arg2);
/* stfiwx */
1114 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
1116 /* POWER2 specific instructions */
1117 /* Quad manipulation (load/store two floats at a time) */
/* lfq (POWER2): load fD and f(D+1) from EA and EA+8. */
1120 static void gen_lfq(DisasContext *ctx)
1122 int rd = rD(ctx->opcode);
1125 gen_set_access_type(ctx, ACCESS_FLOAT);
1126 t0 = tcg_temp_new();
1127 t1 = tcg_temp_new_i64();
1128 gen_addr_imm_index(ctx, t0, 0);
1129 gen_qemu_ld64_i64(ctx, t1, t0);
1131 gen_addr_add(ctx, t0, t0, 8);
1132 gen_qemu_ld64_i64(ctx, t1, t0);
/* (rd + 1) % 32 wraps f31 -> f0 */
1133 set_fpr((rd + 1) % 32, t1);
1135 tcg_temp_free_i64(t1);
/* lfqu (POWER2): lfq with update — rA is set to the first EA. */
1139 static void gen_lfqu(DisasContext *ctx)
1141 int ra = rA(ctx->opcode);
1142 int rd = rD(ctx->opcode);
1145 gen_set_access_type(ctx, ACCESS_FLOAT);
1146 t0 = tcg_temp_new();
1147 t1 = tcg_temp_new();
1148 t2 = tcg_temp_new_i64();
1149 gen_addr_imm_index(ctx, t0, 0);
1150 gen_qemu_ld64_i64(ctx, t2, t0);
1152 gen_addr_add(ctx, t1, t0, 8);
1153 gen_qemu_ld64_i64(ctx, t2, t1);
1154 set_fpr((rd + 1) % 32, t2);
1156 tcg_gen_mov_tl(cpu_gpr[ra], t0);
1160 tcg_temp_free_i64(t2);
/* lfqux (POWER2): indexed lfq with update of rA. */
1164 static void gen_lfqux(DisasContext *ctx)
1166 int ra = rA(ctx->opcode);
1167 int rd = rD(ctx->opcode);
1168 gen_set_access_type(ctx, ACCESS_FLOAT);
1171 t2 = tcg_temp_new_i64();
1172 t0 = tcg_temp_new();
1173 gen_addr_reg_index(ctx, t0);
1174 gen_qemu_ld64_i64(ctx, t2, t0);
1176 t1 = tcg_temp_new();
1177 gen_addr_add(ctx, t1, t0, 8);
1178 gen_qemu_ld64_i64(ctx, t2, t1);
1179 set_fpr((rd + 1) % 32, t2);
1182 tcg_gen_mov_tl(cpu_gpr[ra], t0);
1185 tcg_temp_free_i64(t2);
/* lfqx (POWER2): indexed lfq, no update. */
1189 static void gen_lfqx(DisasContext *ctx)
1191 int rd = rD(ctx->opcode);
1194 gen_set_access_type(ctx, ACCESS_FLOAT);
1195 t0 = tcg_temp_new();
1196 t1 = tcg_temp_new_i64();
1197 gen_addr_reg_index(ctx, t0);
1198 gen_qemu_ld64_i64(ctx, t1, t0);
1200 gen_addr_add(ctx, t0, t0, 8);
1201 gen_qemu_ld64_i64(ctx, t1, t0);
1202 set_fpr((rd + 1) % 32, t1);
1204 tcg_temp_free_i64(t1);
/* stfq (POWER2): store fD and f(D+1) to EA and EA+8. */
1208 static void gen_stfq(DisasContext *ctx)
1210 int rd = rD(ctx->opcode);
1213 gen_set_access_type(ctx, ACCESS_FLOAT);
1214 t0 = tcg_temp_new();
1215 t1 = tcg_temp_new_i64();
1216 gen_addr_imm_index(ctx, t0, 0);
1218 gen_qemu_st64_i64(ctx, t1, t0);
1219 gen_addr_add(ctx, t0, t0, 8);
1220 get_fpr(t1, (rd + 1) % 32);
1221 gen_qemu_st64_i64(ctx, t1, t0);
1223 tcg_temp_free_i64(t1);
/* stfqu (POWER2): stfq with update — rA is set to the first EA. */
1227 static void gen_stfqu(DisasContext *ctx)
1229 int ra = rA(ctx->opcode);
1230 int rd = rD(ctx->opcode);
1233 gen_set_access_type(ctx, ACCESS_FLOAT);
1234 t2 = tcg_temp_new_i64();
1235 t0 = tcg_temp_new();
1236 gen_addr_imm_index(ctx, t0, 0);
1238 gen_qemu_st64_i64(ctx, t2, t0);
1239 t1 = tcg_temp_new();
1240 gen_addr_add(ctx, t1, t0, 8);
1241 get_fpr(t2, (rd + 1) % 32);
1242 gen_qemu_st64_i64(ctx, t2, t1);
1245 tcg_gen_mov_tl(cpu_gpr[ra], t0);
1248 tcg_temp_free_i64(t2);
/* stfqux (POWER2): indexed stfq with update of rA. */
1252 static void gen_stfqux(DisasContext *ctx)
1254 int ra = rA(ctx->opcode);
1255 int rd = rD(ctx->opcode);
1258 gen_set_access_type(ctx, ACCESS_FLOAT);
1259 t2 = tcg_temp_new_i64();
1260 t0 = tcg_temp_new();
1261 gen_addr_reg_index(ctx, t0);
1263 gen_qemu_st64_i64(ctx, t2, t0);
1264 t1 = tcg_temp_new();
1265 gen_addr_add(ctx, t1, t0, 8);
1266 get_fpr(t2, (rd + 1) % 32);
1267 gen_qemu_st64_i64(ctx, t2, t1);
1270 tcg_gen_mov_tl(cpu_gpr[ra], t0);
1273 tcg_temp_free_i64(t2);
/* stfqx (POWER2): indexed stfq, no update. */
1277 static void gen_stfqx(DisasContext *ctx)
1279 int rd = rD(ctx->opcode);
1282 gen_set_access_type(ctx, ACCESS_FLOAT);
1283 t1 = tcg_temp_new_i64();
1284 t0 = tcg_temp_new();
1285 gen_addr_reg_index(ctx, t0);
1287 gen_qemu_st64_i64(ctx, t1, t0);
1288 gen_addr_add(ctx, t0, t0, 8);
1289 get_fpr(t1, (rd + 1) % 32);
1290 gen_qemu_st64_i64(ctx, t1, t0);
1292 tcg_temp_free_i64(t1);
1295 /* Floating-point Load/Store Instructions */
/*
 * Decodetree worker shared by all FP load/store forms.
 * @update: write the computed EA back to rA (invalid with ra == 0);
 * @store:  store fS rather than load fD;
 * @single: use the 32-bit single-precision memory format.
 */
1296 static bool do_lsfpsd(DisasContext *ctx, int rt, int ra, TCGv displ,
1297 bool update, bool store, bool single)
1301 REQUIRE_INSNS_FLAGS(ctx, FLOAT);
1303 if (update && ra == 0) {
1307 gen_set_access_type(ctx, ACCESS_FLOAT);
1308 t0 = tcg_temp_new_i64();
1309 ea = do_ea_calc(ctx, ra, displ);
1313 gen_qemu_st32fs(ctx, t0, ea);
1315 gen_qemu_st64_i64(ctx, t0, ea);
1319 gen_qemu_ld32fs(ctx, t0, ea);
1321 gen_qemu_ld64_i64(ctx, t0, ea);
1326 tcg_gen_mov_tl(cpu_gpr[ra], ea);
1328 tcg_temp_free_i64(t0);
/* D-form adapter: displacement comes from the SI immediate. */
1333 static bool do_lsfp_D(DisasContext *ctx, arg_D *a, bool update, bool store,
1336 return do_lsfpsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), update, store,
/* Prefixed (PLS_D) adapter: resolve the prefix into a D-form first. */
1340 static bool do_lsfp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool update,
1341 bool store, bool single)
1344 if (!resolve_PLS_D(ctx, &d, a)) {
1347 return do_lsfp_D(ctx, &d, update, store, single);
/* X-form adapter: displacement comes from GPR rB. */
1350 static bool do_lsfp_X(DisasContext *ctx, arg_X *a, bool update,
1351 bool store, bool single)
1353 return do_lsfpsd(ctx, a->rt, a->ra, cpu_gpr[a->rb], update, store, single);
/*
 * Decodetree bindings: (update, store, single) per instruction.
 * LFS* load single, LFD* load double, STFS*/STFD* the store forms;
 * the P-prefixed variants go through the PLS_D adapter.
 */
1356 TRANS(LFS, do_lsfp_D, false, false, true)
1357 TRANS(LFSU, do_lsfp_D, true, false, true)
1358 TRANS(LFSX, do_lsfp_X, false, false, true)
1359 TRANS(LFSUX, do_lsfp_X, true, false, true)
1360 TRANS(PLFS, do_lsfp_PLS_D, false, false, true)
1362 TRANS(LFD, do_lsfp_D, false, false, false)
1363 TRANS(LFDU, do_lsfp_D, true, false, false)
1364 TRANS(LFDX, do_lsfp_X, false, false, false)
1365 TRANS(LFDUX, do_lsfp_X, true, false, false)
1366 TRANS(PLFD, do_lsfp_PLS_D, false, false, false)
1368 TRANS(STFS, do_lsfp_D, false, true, true)
1369 TRANS(STFSU, do_lsfp_D, true, true, true)
1370 TRANS(STFSX, do_lsfp_X, false, true, true)
1371 TRANS(STFSUX, do_lsfp_X, true, true, true)
1372 TRANS(PSTFS, do_lsfp_PLS_D, false, true, true)
1374 TRANS(STFD, do_lsfp_D, false, true, false)
1375 TRANS(STFDU, do_lsfp_D, true, true, false)
1376 TRANS(STFDX, do_lsfp_X, false, true, false)
1377 TRANS(STFDUX, do_lsfp_X, true, true, false)
1378 TRANS(PSTFD, do_lsfp_PLS_D, false, true, false)
/* Scrub the generator macros so later .c.inc inclusions can reuse them. */
1380 #undef _GEN_FLOAT_ACB
1381 #undef GEN_FLOAT_ACB
1382 #undef _GEN_FLOAT_AB
1384 #undef _GEN_FLOAT_AC