4 * Standard FPU translation
/*
 * Reset the softfloat exception status in env before an FP operation.
 * NOTE(review): this chunk is line-stripped (braces/blank lines missing);
 * code left byte-identical.
 */
7 static inline void gen_reset_fpstatus(void)
9 gen_helper_reset_fpstatus(cpu_env);
/*
 * Set FPRF from a float64 result, then raise any pending deferred
 * floating-point exception via float_check_status.
 */
12 static inline void gen_compute_fprf_float64(TCGv_i64 arg)
14 gen_helper_compute_fprf_float64(cpu_env, arg);
15 gen_helper_float_check_status(cpu_env);
/*
 * Copy the top nibble of FPSCR (FX/FEX/VX/OX) into CR1, used by Rc=1
 * forms of FP instructions.  On 64-bit targets FPSCR must be truncated
 * to 32 bits before the shift; on 32-bit targets the shift is direct.
 * NOTE(review): the #else/#endif lines are missing from this chunk.
 */
18 #if defined(TARGET_PPC64)
19 static void gen_set_cr1_from_fpscr(DisasContext *ctx)
21 TCGv_i32 tmp = tcg_temp_new_i32();
22 tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
23 tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
24 tcg_temp_free_i32(tmp);
27 static void gen_set_cr1_from_fpscr(DisasContext *ctx)
29 tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
33 /*** Floating-Point arithmetic ***/
/*
 * Emit an fA*fC+fB style op (fmadd family): loads rA/rC/rB, calls
 * helper f<op>, optionally rounds to single precision (the guarding
 * "if (isfloat)" line is missing from this chunk — TODO confirm),
 * stores rD, then optionally sets FPRF and CR1 (Rc=1).
 * Comments kept outside the macro so the \ continuations stay intact.
 */
34 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
35 static void gen_f##name(DisasContext *ctx) \
41 if (unlikely(!ctx->fpu_enabled)) { \
42 gen_exception(ctx, POWERPC_EXCP_FPU); \
45 t0 = tcg_temp_new_i64(); \
46 t1 = tcg_temp_new_i64(); \
47 t2 = tcg_temp_new_i64(); \
48 t3 = tcg_temp_new_i64(); \
49 gen_reset_fpstatus(); \
50 get_fpr(t0, rA(ctx->opcode)); \
51 get_fpr(t1, rC(ctx->opcode)); \
52 get_fpr(t2, rB(ctx->opcode)); \
53 gen_helper_f##op(t3, cpu_env, t0, t1, t2); \
55 gen_helper_frsp(t3, cpu_env, t3); \
57 set_fpr(rD(ctx->opcode), t3); \
59 gen_compute_fprf_float64(t3); \
61 if (unlikely(Rc(ctx->opcode) != 0)) { \
62 gen_set_cr1_from_fpscr(ctx); \
64 tcg_temp_free_i64(t0); \
65 tcg_temp_free_i64(t1); \
66 tcg_temp_free_i64(t2); \
67 tcg_temp_free_i64(t3); \
/* Instantiate both the double (opcode 0x3F) and single (0x3B) variants. */
70 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
71 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
72 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
/*
 * Emit a two-operand fA op fB instruction (fadd/fsub/fdiv family):
 * loads rA/rB, calls helper f<op>, optionally rounds to single
 * (isfloat), stores rD, optionally sets FPRF and CR1.
 */
74 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
75 static void gen_f##name(DisasContext *ctx) \
80 if (unlikely(!ctx->fpu_enabled)) { \
81 gen_exception(ctx, POWERPC_EXCP_FPU); \
84 t0 = tcg_temp_new_i64(); \
85 t1 = tcg_temp_new_i64(); \
86 t2 = tcg_temp_new_i64(); \
87 gen_reset_fpstatus(); \
88 get_fpr(t0, rA(ctx->opcode)); \
89 get_fpr(t1, rB(ctx->opcode)); \
90 gen_helper_f##op(t2, cpu_env, t0, t1); \
92 gen_helper_frsp(t2, cpu_env, t2); \
94 set_fpr(rD(ctx->opcode), t2); \
96 gen_compute_fprf_float64(t2); \
98 if (unlikely(Rc(ctx->opcode) != 0)) { \
99 gen_set_cr1_from_fpscr(ctx); \
101 tcg_temp_free_i64(t0); \
102 tcg_temp_free_i64(t1); \
103 tcg_temp_free_i64(t2); \
/* Instantiate double (0x3F) and single (0x3B) A-B variants. */
105 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
106 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
107 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/*
 * Emit an fA op fC instruction (fmul family — note operand 2 comes from
 * rC, not rB): loads rA/rC, calls helper f<op>, optionally rounds to
 * single (isfloat), stores rD, optionally sets FPRF and CR1.
 */
109 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
110 static void gen_f##name(DisasContext *ctx) \
115 if (unlikely(!ctx->fpu_enabled)) { \
116 gen_exception(ctx, POWERPC_EXCP_FPU); \
119 t0 = tcg_temp_new_i64(); \
120 t1 = tcg_temp_new_i64(); \
121 t2 = tcg_temp_new_i64(); \
122 gen_reset_fpstatus(); \
123 get_fpr(t0, rA(ctx->opcode)); \
124 get_fpr(t1, rC(ctx->opcode)); \
125 gen_helper_f##op(t2, cpu_env, t0, t1); \
127 gen_helper_frsp(t2, cpu_env, t2); \
129 set_fpr(rD(ctx->opcode), t2); \
131 gen_compute_fprf_float64(t2); \
133 if (unlikely(Rc(ctx->opcode) != 0)) { \
134 gen_set_cr1_from_fpscr(ctx); \
136 tcg_temp_free_i64(t0); \
137 tcg_temp_free_i64(t1); \
138 tcg_temp_free_i64(t2); \
/* Instantiate double (0x3F) and single (0x3B) A-C variants. */
140 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
141 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
142 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/*
 * Emit a single-source op on rB (round/convert family): loads rB,
 * calls helper f<name>, stores rD, optionally sets FPRF and CR1.
 */
144 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
145 static void gen_f##name(DisasContext *ctx) \
149 if (unlikely(!ctx->fpu_enabled)) { \
150 gen_exception(ctx, POWERPC_EXCP_FPU); \
153 t0 = tcg_temp_new_i64(); \
154 t1 = tcg_temp_new_i64(); \
155 gen_reset_fpstatus(); \
156 get_fpr(t0, rB(ctx->opcode)); \
157 gen_helper_f##name(t1, cpu_env, t0); \
158 set_fpr(rD(ctx->opcode), t1); \
160 gen_compute_fprf_float64(t1); \
162 if (unlikely(Rc(ctx->opcode) != 0)) { \
163 gen_set_cr1_from_fpscr(ctx); \
165 tcg_temp_free_i64(t0); \
166 tcg_temp_free_i64(t1); \
/*
 * Emit a single-source estimate op on rB (fre/fres/frsqrte family).
 * Same shape as GEN_FLOAT_B but parameterized by op1 as well.
 */
169 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
170 static void gen_f##name(DisasContext *ctx) \
174 if (unlikely(!ctx->fpu_enabled)) { \
175 gen_exception(ctx, POWERPC_EXCP_FPU); \
178 t0 = tcg_temp_new_i64(); \
179 t1 = tcg_temp_new_i64(); \
180 gen_reset_fpstatus(); \
181 get_fpr(t0, rB(ctx->opcode)); \
182 gen_helper_f##name(t1, cpu_env, t0); \
183 set_fpr(rD(ctx->opcode), t1); \
185 gen_compute_fprf_float64(t1); \
187 if (unlikely(Rc(ctx->opcode) != 0)) { \
188 gen_set_cr1_from_fpscr(ctx); \
190 tcg_temp_free_i64(t0); \
191 tcg_temp_free_i64(t1); \
/* fadd/fadds */
195 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv/fdivs */
197 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul/fmuls */
199 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
/* fre — reciprocal estimate */
202 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
/* fres — single-precision reciprocal estimate */
205 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
/* frsqrte — reciprocal square-root estimate */
208 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
/*
 * frsqrtes: reciprocal square-root estimate, single precision.
 * Hand-written (not GEN_FLOAT_BS) because it reuses the double-precision
 * frsqrte helper and then rounds the result to single with frsp.
 */
211 static void gen_frsqrtes(DisasContext *ctx)
215 if (unlikely(!ctx->fpu_enabled)) {
216 gen_exception(ctx, POWERPC_EXCP_FPU);
219 t0 = tcg_temp_new_i64();
220 t1 = tcg_temp_new_i64();
221 gen_reset_fpstatus();
222 get_fpr(t0, rB(ctx->opcode));
223 gen_helper_frsqrte(t1, cpu_env, t0);
224 gen_helper_frsp(t1, cpu_env, t1);
225 set_fpr(rD(ctx->opcode), t1);
226 gen_compute_fprf_float64(t1);
227 if (unlikely(Rc(ctx->opcode) != 0)) {
228 gen_set_cr1_from_fpscr(ctx);
230 tcg_temp_free_i64(t0);
231 tcg_temp_free_i64(t1);
/* fsel — no single variant, no FPRF update (set_fprf = 0) */
235 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
/* fsub/fsubs */
237 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* fsqrt: double-precision square root of rB into rD. */
241 static void gen_fsqrt(DisasContext *ctx)
245 if (unlikely(!ctx->fpu_enabled)) {
246 gen_exception(ctx, POWERPC_EXCP_FPU);
249 t0 = tcg_temp_new_i64();
250 t1 = tcg_temp_new_i64();
251 gen_reset_fpstatus();
252 get_fpr(t0, rB(ctx->opcode));
253 gen_helper_fsqrt(t1, cpu_env, t0);
254 set_fpr(rD(ctx->opcode), t1);
255 gen_compute_fprf_float64(t1);
256 if (unlikely(Rc(ctx->opcode) != 0)) {
257 gen_set_cr1_from_fpscr(ctx);
259 tcg_temp_free_i64(t0);
260 tcg_temp_free_i64(t1);
/*
 * fsqrts: single-precision square root — computed with the double
 * fsqrt helper and then rounded to single with frsp.
 */
263 static void gen_fsqrts(DisasContext *ctx)
267 if (unlikely(!ctx->fpu_enabled)) {
268 gen_exception(ctx, POWERPC_EXCP_FPU);
271 t0 = tcg_temp_new_i64();
272 t1 = tcg_temp_new_i64();
273 gen_reset_fpstatus();
274 get_fpr(t0, rB(ctx->opcode));
275 gen_helper_fsqrt(t1, cpu_env, t0);
276 gen_helper_frsp(t1, cpu_env, t1);
277 set_fpr(rD(ctx->opcode), t1);
278 gen_compute_fprf_float64(t1);
279 if (unlikely(Rc(ctx->opcode) != 0)) {
280 gen_set_cr1_from_fpscr(ctx);
282 tcg_temp_free_i64(t0);
283 tcg_temp_free_i64(t1);
286 /*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
288 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
290 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
291 /* fnmadd - fnmadds */
292 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
293 /* fnmsub - fnmsubs */
294 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
296 /*** Floating-Point round & convert ***/
/* fctiw — convert to 32-bit signed int, current rounding mode */
298 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwu — unsigned variant (ISA 2.06) */
300 GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
/* fctiwz / fctiwuz — round-toward-zero variants */
302 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
304 GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
/* frsp — round to single precision */
306 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
/* fcfid family — convert from 64-bit integer */
308 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
310 GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
312 GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
314 GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fctid family — convert to 64-bit integer */
316 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
318 GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
320 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
322 GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* frin/friz/frip/frim — round to integer (nearest/zero/+inf/-inf) */
325 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
327 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
329 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
331 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
/* ftdiv: test-for-software-divide — helper writes the CR field directly. */
333 static void gen_ftdiv(DisasContext *ctx)
337 if (unlikely(!ctx->fpu_enabled)) {
338 gen_exception(ctx, POWERPC_EXCP_FPU);
341 t0 = tcg_temp_new_i64();
342 t1 = tcg_temp_new_i64();
343 get_fpr(t0, rA(ctx->opcode));
344 get_fpr(t1, rB(ctx->opcode));
345 gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], t0, t1);
346 tcg_temp_free_i64(t0);
347 tcg_temp_free_i64(t1);
/* ftsqrt: test-for-software-square-root — CR field set by the helper. */
350 static void gen_ftsqrt(DisasContext *ctx)
353 if (unlikely(!ctx->fpu_enabled)) {
354 gen_exception(ctx, POWERPC_EXCP_FPU);
357 t0 = tcg_temp_new_i64();
358 get_fpr(t0, rB(ctx->opcode));
359 gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], t0);
360 tcg_temp_free_i64(t0);
365 /*** Floating-Point compare ***/
/*
 * fcmpo: ordered compare of rA and rB into CR[crfD]; the helper may
 * raise VXSNAN/VXVC, so float_check_status follows.
 */
368 static void gen_fcmpo(DisasContext *ctx)
373 if (unlikely(!ctx->fpu_enabled)) {
374 gen_exception(ctx, POWERPC_EXCP_FPU);
377 t0 = tcg_temp_new_i64();
378 t1 = tcg_temp_new_i64();
379 gen_reset_fpstatus();
380 crf = tcg_const_i32(crfD(ctx->opcode));
381 get_fpr(t0, rA(ctx->opcode));
382 get_fpr(t1, rB(ctx->opcode));
383 gen_helper_fcmpo(cpu_env, t0, t1, crf);
384 tcg_temp_free_i32(crf);
385 gen_helper_float_check_status(cpu_env);
386 tcg_temp_free_i64(t0);
387 tcg_temp_free_i64(t1);
/* fcmpu: unordered compare of rA and rB into CR[crfD]. */
391 static void gen_fcmpu(DisasContext *ctx)
396 if (unlikely(!ctx->fpu_enabled)) {
397 gen_exception(ctx, POWERPC_EXCP_FPU);
400 t0 = tcg_temp_new_i64();
401 t1 = tcg_temp_new_i64();
402 gen_reset_fpstatus();
403 crf = tcg_const_i32(crfD(ctx->opcode));
404 get_fpr(t0, rA(ctx->opcode));
405 get_fpr(t1, rB(ctx->opcode));
406 gen_helper_fcmpu(cpu_env, t0, t1, crf);
407 tcg_temp_free_i32(crf);
408 gen_helper_float_check_status(cpu_env);
409 tcg_temp_free_i64(t0);
410 tcg_temp_free_i64(t1);
413 /*** Floating-point move ***/
415 /* XXX: beware that fabs never checks for NaNs nor update FPSCR */
/* fabs: clear the sign bit (bit 63) of rB into rD — pure bit op. */
416 static void gen_fabs(DisasContext *ctx)
420 if (unlikely(!ctx->fpu_enabled)) {
421 gen_exception(ctx, POWERPC_EXCP_FPU);
424 t0 = tcg_temp_new_i64();
425 t1 = tcg_temp_new_i64();
426 get_fpr(t0, rB(ctx->opcode));
427 tcg_gen_andi_i64(t1, t0, ~(1ULL << 63));
428 set_fpr(rD(ctx->opcode), t1);
429 if (unlikely(Rc(ctx->opcode))) {
430 gen_set_cr1_from_fpscr(ctx);
432 tcg_temp_free_i64(t0);
433 tcg_temp_free_i64(t1);
437 /* XXX: beware that fmr never checks for NaNs nor update FPSCR */
/* fmr: raw 64-bit register copy rB -> rD. */
438 static void gen_fmr(DisasContext *ctx)
441 if (unlikely(!ctx->fpu_enabled)) {
442 gen_exception(ctx, POWERPC_EXCP_FPU);
445 t0 = tcg_temp_new_i64();
446 get_fpr(t0, rB(ctx->opcode));
447 set_fpr(rD(ctx->opcode), t0);
448 if (unlikely(Rc(ctx->opcode))) {
449 gen_set_cr1_from_fpscr(ctx);
451 tcg_temp_free_i64(t0);
455 /* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
/* fnabs: force the sign bit set (negative absolute value). */
456 static void gen_fnabs(DisasContext *ctx)
460 if (unlikely(!ctx->fpu_enabled)) {
461 gen_exception(ctx, POWERPC_EXCP_FPU);
464 t0 = tcg_temp_new_i64();
465 t1 = tcg_temp_new_i64();
466 get_fpr(t0, rB(ctx->opcode));
467 tcg_gen_ori_i64(t1, t0, 1ULL << 63);
468 set_fpr(rD(ctx->opcode), t1);
469 if (unlikely(Rc(ctx->opcode))) {
470 gen_set_cr1_from_fpscr(ctx);
472 tcg_temp_free_i64(t0);
473 tcg_temp_free_i64(t1);
477 /* XXX: beware that fneg never checks for NaNs nor update FPSCR */
/* fneg: flip the sign bit of rB into rD. */
478 static void gen_fneg(DisasContext *ctx)
482 if (unlikely(!ctx->fpu_enabled)) {
483 gen_exception(ctx, POWERPC_EXCP_FPU);
486 t0 = tcg_temp_new_i64();
487 t1 = tcg_temp_new_i64();
488 get_fpr(t0, rB(ctx->opcode));
489 tcg_gen_xori_i64(t1, t0, 1ULL << 63);
490 set_fpr(rD(ctx->opcode), t1);
491 if (unlikely(Rc(ctx->opcode))) {
492 gen_set_cr1_from_fpscr(ctx);
494 tcg_temp_free_i64(t0);
495 tcg_temp_free_i64(t1);
498 /* fcpsgn: PowerPC 2.05 specification */
499 /* XXX: beware that fcpsgn never checks for NaNs nor update FPSCR */
/*
 * fcpsgn: deposit places bits 0..62 (magnitude) of rB into rA's value,
 * keeping rA's bit 63 — i.e. result = sign(rA) | magnitude(rB).
 */
500 static void gen_fcpsgn(DisasContext *ctx)
505 if (unlikely(!ctx->fpu_enabled)) {
506 gen_exception(ctx, POWERPC_EXCP_FPU);
509 t0 = tcg_temp_new_i64();
510 t1 = tcg_temp_new_i64();
511 t2 = tcg_temp_new_i64();
512 get_fpr(t0, rA(ctx->opcode));
513 get_fpr(t1, rB(ctx->opcode));
514 tcg_gen_deposit_i64(t2, t0, t1, 0, 63);
515 set_fpr(rD(ctx->opcode), t2);
516 if (unlikely(Rc(ctx->opcode))) {
517 gen_set_cr1_from_fpscr(ctx);
519 tcg_temp_free_i64(t0);
520 tcg_temp_free_i64(t1);
521 tcg_temp_free_i64(t2);
/*
 * fmrgew: result = high word of rA : high word of rB.
 * rB's high word is shifted down, then deposited into the low 32 bits
 * of rA's value.
 */
524 static void gen_fmrgew(DisasContext *ctx)
529 if (unlikely(!ctx->fpu_enabled)) {
530 gen_exception(ctx, POWERPC_EXCP_FPU);
533 b0 = tcg_temp_new_i64();
534 t0 = tcg_temp_new_i64();
535 t1 = tcg_temp_new_i64();
536 get_fpr(t0, rB(ctx->opcode));
537 tcg_gen_shri_i64(b0, t0, 32);
538 get_fpr(t0, rA(ctx->opcode));
539 tcg_gen_deposit_i64(t1, t0, b0, 0, 32);
540 set_fpr(rD(ctx->opcode), t1);
541 tcg_temp_free_i64(b0);
542 tcg_temp_free_i64(t0);
543 tcg_temp_free_i64(t1);
/*
 * fmrgow: deposit rA's low word into bits 32..63 of rB's value,
 * i.e. result = low word of rA : low word of rB.
 */
546 static void gen_fmrgow(DisasContext *ctx)
551 if (unlikely(!ctx->fpu_enabled)) {
552 gen_exception(ctx, POWERPC_EXCP_FPU);
555 t0 = tcg_temp_new_i64();
556 t1 = tcg_temp_new_i64();
557 t2 = tcg_temp_new_i64();
558 get_fpr(t0, rB(ctx->opcode));
559 get_fpr(t1, rA(ctx->opcode));
560 tcg_gen_deposit_i64(t2, t0, t1, 32, 32);
561 set_fpr(rD(ctx->opcode), t2);
562 tcg_temp_free_i64(t0);
563 tcg_temp_free_i64(t1);
564 tcg_temp_free_i64(t2);
567 /*** Floating-Point status & ctrl register ***/
/*
 * mcrfs: copy FPSCR field crfS into CR field crfD, then clear the
 * copied exception bits (incl. FX) in FPSCR via the store helper so
 * FEX/VX stay consistent.  NOTE(review): declarations of bfa/shift/
 * nibble and the mask constant lines are missing from this chunk.
 */
570 static void gen_mcrfs(DisasContext *ctx)
572 TCGv tmp = tcg_temp_new();
574 TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
579 if (unlikely(!ctx->fpu_enabled)) {
580 gen_exception(ctx, POWERPC_EXCP_FPU);
583 bfa = crfS(ctx->opcode);
586 tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
587 tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
588 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],
591 tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
592 /* Only the exception bits (including FX) should be cleared if read */
593 tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
594 ~((0xF << shift) & FP_EX_CLEAR_BITS));
595 /* FEX and VX need to be updated, so don't set fpscr directly */
596 tmask = tcg_const_i32(1 << nibble);
597 gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
598 tcg_temp_free_i32(tmask);
599 tcg_temp_free_i64(tnew_fpscr);
/* mffs: move the whole FPSCR (zero-extended to 64 bits) into FPR rD. */
603 static void gen_mffs(DisasContext *ctx)
606 if (unlikely(!ctx->fpu_enabled)) {
607 gen_exception(ctx, POWERPC_EXCP_FPU);
610 t0 = tcg_temp_new_i64();
611 gen_reset_fpstatus();
612 tcg_gen_extu_tl_i64(t0, cpu_fpscr);
613 set_fpr(rD(ctx->opcode), t0);
614 if (unlikely(Rc(ctx->opcode))) {
615 gen_set_cr1_from_fpscr(ctx);
617 tcg_temp_free_i64(t0);
/*
 * mffsl (ISA 3.0): like mffs but masks the copied FPSCR down to the
 * DRN, status, enable, and RN fields.  Falls back to plain mffs on
 * pre-ISA300 CPUs.
 */
621 static void gen_mffsl(DisasContext *ctx)
625 if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
626 return gen_mffs(ctx);
629 if (unlikely(!ctx->fpu_enabled)) {
630 gen_exception(ctx, POWERPC_EXCP_FPU);
633 t0 = tcg_temp_new_i64();
634 gen_reset_fpstatus();
635 tcg_gen_extu_tl_i64(t0, cpu_fpscr);
636 /* Mask everything except mode, status, and enables. */
637 tcg_gen_andi_i64(t0, t0, FP_DRN | FP_STATUS | FP_ENABLES | FP_RN);
638 set_fpr(rD(ctx->opcode), t0);
639 tcg_temp_free_i64(t0);
/*
 * mffsce (ISA 3.0): read FPSCR into rD, then clear the exception
 * enable bits via the store helper (mask 0x0003 selects the affected
 * nibbles).  Falls back to mffs on pre-ISA300 CPUs.
 */
643 static void gen_mffsce(DisasContext *ctx)
648 if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
649 return gen_mffs(ctx);
652 if (unlikely(!ctx->fpu_enabled)) {
653 gen_exception(ctx, POWERPC_EXCP_FPU);
657 t0 = tcg_temp_new_i64();
659 gen_reset_fpstatus();
660 tcg_gen_extu_tl_i64(t0, cpu_fpscr);
661 set_fpr(rD(ctx->opcode), t0);
663 /* Clear exception enable bits in the FPSCR. */
664 tcg_gen_andi_i64(t0, t0, ~FP_ENABLES);
665 mask = tcg_const_i32(0x0003);
666 gen_helper_store_fpscr(cpu_env, t0, mask);
668 tcg_temp_free_i32(mask);
669 tcg_temp_free_i64(t0);
/*
 * Shared tail for mffscrn/mffscrni: write the masked old FPSCR to rD,
 * then merge the caller-supplied RN bits (t1) back into FPSCR through
 * the store helper (mask 0x0001 touches only the RN nibble).
 */
672 static void gen_helper_mffscrn(DisasContext *ctx, TCGv_i64 t1)
674 TCGv_i64 t0 = tcg_temp_new_i64();
675 TCGv_i32 mask = tcg_const_i32(0x0001);
677 gen_reset_fpstatus();
678 tcg_gen_extu_tl_i64(t0, cpu_fpscr);
679 tcg_gen_andi_i64(t0, t0, FP_DRN | FP_ENABLES | FP_RN);
680 set_fpr(rD(ctx->opcode), t0);
682 /* Mask FPSCR value to clear RN. */
683 tcg_gen_andi_i64(t0, t0, ~FP_RN);
685 /* Merge RN into FPSCR value. */
686 tcg_gen_or_i64(t0, t0, t1);
688 gen_helper_store_fpscr(cpu_env, t0, mask);
690 tcg_temp_free_i32(mask);
691 tcg_temp_free_i64(t0);
/* mffscrn (ISA 3.0): new RN comes from FRB; falls back to mffs pre-ISA300. */
695 static void gen_mffscrn(DisasContext *ctx)
699 if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
700 return gen_mffs(ctx);
703 if (unlikely(!ctx->fpu_enabled)) {
704 gen_exception(ctx, POWERPC_EXCP_FPU);
708 t1 = tcg_temp_new_i64();
709 get_fpr(t1, rB(ctx->opcode));
710 /* Mask FRB to get just RN. */
711 tcg_gen_andi_i64(t1, t1, FP_RN);
713 gen_helper_mffscrn(ctx, t1);
715 tcg_temp_free_i64(t1);
/* mffscrni (ISA 3.0): new RN is an immediate from the opcode. */
719 static void gen_mffscrni(DisasContext *ctx)
723 if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
724 return gen_mffs(ctx);
727 if (unlikely(!ctx->fpu_enabled)) {
728 gen_exception(ctx, POWERPC_EXCP_FPU);
732 t1 = tcg_const_i64((uint64_t)RM(ctx->opcode));
734 gen_helper_mffscrn(ctx, t1);
736 tcg_temp_free_i64(t1);
/*
 * mtfsb0: clear one FPSCR bit.  FEX and VX are derived bits, so
 * requests for them are ignored.  Rc=1 copies the FPSCR top nibble
 * into CR1 (shift by FPSCR_OX == 28).
 */
740 static void gen_mtfsb0(DisasContext *ctx)
744 if (unlikely(!ctx->fpu_enabled)) {
745 gen_exception(ctx, POWERPC_EXCP_FPU);
748 crb = 31 - crbD(ctx->opcode);
749 gen_reset_fpstatus();
750 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
752 t0 = tcg_const_i32(crb);
753 gen_helper_fpscr_clrbit(cpu_env, t0);
754 tcg_temp_free_i32(t0);
756 if (unlikely(Rc(ctx->opcode) != 0)) {
757 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
758 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
/*
 * mtfsb1: set one FPSCR bit (FEX/VX/NI excluded), then check for a
 * deferred exception since setting an enable bit may unmask one.
 */
763 static void gen_mtfsb1(DisasContext *ctx)
767 if (unlikely(!ctx->fpu_enabled)) {
768 gen_exception(ctx, POWERPC_EXCP_FPU);
771 crb = 31 - crbD(ctx->opcode);
772 gen_reset_fpstatus();
773 /* XXX: we pretend we can only do IEEE floating-point computations */
774 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
776 t0 = tcg_const_i32(crb);
777 gen_helper_fpscr_setbit(cpu_env, t0);
778 tcg_temp_free_i32(t0);
780 if (unlikely(Rc(ctx->opcode) != 0)) {
781 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
782 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
784 /* We can raise a deferred exception */
785 gen_helper_float_check_status(cpu_env);
/*
 * mtfsf: store FRB into FPSCR under a field mask.  l=1 means "all
 * fields" (16 nibbles on ISA 2.05+, else 8); w selects the upper word
 * on ISA 2.05.  Note `w & !(...)` relies on ! yielding 0/1 — it
 * rejects w=1 on pre-2.05 CPUs.  NOTE(review): the if/else around the
 * two tcg_const_i32 mask lines is missing from this chunk.
 */
789 static void gen_mtfsf(DisasContext *ctx)
795 if (unlikely(!ctx->fpu_enabled)) {
796 gen_exception(ctx, POWERPC_EXCP_FPU);
799 flm = FPFLM(ctx->opcode);
800 l = FPL(ctx->opcode);
801 w = FPW(ctx->opcode);
802 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
803 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
806 gen_reset_fpstatus();
808 t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
810 t0 = tcg_const_i32(flm << (w * 8));
812 t1 = tcg_temp_new_i64();
813 get_fpr(t1, rB(ctx->opcode));
814 gen_helper_store_fpscr(cpu_env, t1, t0);
815 tcg_temp_free_i32(t0);
816 if (unlikely(Rc(ctx->opcode) != 0)) {
817 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
818 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
820 /* We can raise a deferred exception */
821 gen_helper_float_check_status(cpu_env);
822 tcg_temp_free_i64(t1);
/*
 * mtfsfi: store a 4-bit immediate into one FPSCR field.  sh selects
 * the target nibble (w picks the upper word on ISA 2.05); the
 * deferred-exception check follows as for mtfsf.
 */
826 static void gen_mtfsfi(DisasContext *ctx)
832 if (unlikely(!ctx->fpu_enabled)) {
833 gen_exception(ctx, POWERPC_EXCP_FPU);
836 w = FPW(ctx->opcode);
837 bf = FPBF(ctx->opcode);
838 if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
839 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
842 sh = (8 * w) + 7 - bf;
843 gen_reset_fpstatus();
844 t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
845 t1 = tcg_const_i32(1 << sh);
846 gen_helper_store_fpscr(cpu_env, t0, t1);
847 tcg_temp_free_i64(t0);
848 tcg_temp_free_i32(t1);
849 if (unlikely(Rc(ctx->opcode) != 0)) {
850 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
851 tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
853 /* We can raise a deferred exception */
854 gen_helper_float_check_status(cpu_env);
857 /*** Floating-point load ***/
/* FP load, D-form addressing: EA = rA|0 + SIMM, result into FPR rD. */
858 #define GEN_LDF(name, ldop, opc, type) \
859 static void glue(gen_, name)(DisasContext *ctx) \
863 if (unlikely(!ctx->fpu_enabled)) { \
864 gen_exception(ctx, POWERPC_EXCP_FPU); \
867 gen_set_access_type(ctx, ACCESS_FLOAT); \
868 EA = tcg_temp_new(); \
869 t0 = tcg_temp_new_i64(); \
870 gen_addr_imm_index(ctx, EA, 0); \
871 gen_qemu_##ldop(ctx, t0, EA); \
872 set_fpr(rD(ctx->opcode), t0); \
874 tcg_temp_free_i64(t0); \
/* FP load with update (D-form): rA must be nonzero; EA written back to rA. */
877 #define GEN_LDUF(name, ldop, opc, type) \
878 static void glue(gen_, name##u)(DisasContext *ctx) \
882 if (unlikely(!ctx->fpu_enabled)) { \
883 gen_exception(ctx, POWERPC_EXCP_FPU); \
886 if (unlikely(rA(ctx->opcode) == 0)) { \
887 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
890 gen_set_access_type(ctx, ACCESS_FLOAT); \
891 EA = tcg_temp_new(); \
892 t0 = tcg_temp_new_i64(); \
893 gen_addr_imm_index(ctx, EA, 0); \
894 gen_qemu_##ldop(ctx, t0, EA); \
895 set_fpr(rD(ctx->opcode), t0); \
896 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
898 tcg_temp_free_i64(t0); \
/* FP load with update, X-form: EA = rA + rB, written back to rA. */
901 #define GEN_LDUXF(name, ldop, opc, type) \
902 static void glue(gen_, name##ux)(DisasContext *ctx) \
906 if (unlikely(!ctx->fpu_enabled)) { \
907 gen_exception(ctx, POWERPC_EXCP_FPU); \
910 t0 = tcg_temp_new_i64(); \
911 if (unlikely(rA(ctx->opcode) == 0)) { \
912 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
915 gen_set_access_type(ctx, ACCESS_FLOAT); \
916 EA = tcg_temp_new(); \
917 gen_addr_reg_index(ctx, EA); \
918 gen_qemu_##ldop(ctx, t0, EA); \
919 set_fpr(rD(ctx->opcode), t0); \
920 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
922 tcg_temp_free_i64(t0); \
/* FP load, X-form addressing: EA = rA|0 + rB. */
925 #define GEN_LDXF(name, ldop, opc2, opc3, type) \
926 static void glue(gen_, name##x)(DisasContext *ctx) \
930 if (unlikely(!ctx->fpu_enabled)) { \
931 gen_exception(ctx, POWERPC_EXCP_FPU); \
934 gen_set_access_type(ctx, ACCESS_FLOAT); \
935 EA = tcg_temp_new(); \
936 t0 = tcg_temp_new_i64(); \
937 gen_addr_reg_index(ctx, EA); \
938 gen_qemu_##ldop(ctx, t0, EA); \
939 set_fpr(rD(ctx->opcode), t0); \
941 tcg_temp_free_i64(t0); \
/* Instantiate the full load family: plain, update, update-indexed, indexed. */
944 #define GEN_LDFS(name, ldop, op, type) \
945 GEN_LDF(name, ldop, op | 0x20, type); \
946 GEN_LDUF(name, ldop, op | 0x21, type); \
947 GEN_LDUXF(name, ldop, op | 0x01, type); \
948 GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
/* Load a 32-bit float and widen it to the FPR's double representation. */
950 static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
952 TCGv_i32 tmp = tcg_temp_new_i32();
953 tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
954 gen_helper_todouble(dest, tmp);
955 tcg_temp_free_i32(tmp);
958 /* lfd lfdu lfdux lfdx */
959 GEN_LDFS(lfd, ld64_i64, 0x12, PPC_FLOAT);
960 /* lfs lfsu lfsux lfsx */
961 GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
963 /* lfdepx (external PID lfdx) */
/*
 * Indexed FP load through the external-PID TLB index.
 * NOTE(review): the `EA = tcg_temp_new();` line appears to be missing
 * from this chunk — TODO confirm against upstream.
 */
964 static void gen_lfdepx(DisasContext *ctx)
969 if (unlikely(!ctx->fpu_enabled)) {
970 gen_exception(ctx, POWERPC_EXCP_FPU);
973 gen_set_access_type(ctx, ACCESS_FLOAT);
975 t0 = tcg_temp_new_i64();
976 gen_addr_reg_index(ctx, EA);
977 tcg_gen_qemu_ld_i64(t0, EA, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_Q));
978 set_fpr(rD(ctx->opcode), t0);
980 tcg_temp_free_i64(t0);
/*
 * lfdp: load an FP register pair (rD, rD+1) from EA and EA+8.
 * In little-endian mode the two doublewords are loaded in swapped
 * order; the per-doubleword byteswap is handled by gen_qemu_ld64_i64.
 */
984 static void gen_lfdp(DisasContext *ctx)
988 if (unlikely(!ctx->fpu_enabled)) {
989 gen_exception(ctx, POWERPC_EXCP_FPU);
992 gen_set_access_type(ctx, ACCESS_FLOAT);
994 gen_addr_imm_index(ctx, EA, 0);
995 t0 = tcg_temp_new_i64();
997 * We only need to swap high and low halves. gen_qemu_ld64_i64
998 * does necessary 64-bit byteswap already.
1000 if (unlikely(ctx->le_mode)) {
1001 gen_qemu_ld64_i64(ctx, t0, EA);
1002 set_fpr(rD(ctx->opcode) + 1, t0);
1003 tcg_gen_addi_tl(EA, EA, 8);
1004 gen_qemu_ld64_i64(ctx, t0, EA);
1005 set_fpr(rD(ctx->opcode), t0);
1007 gen_qemu_ld64_i64(ctx, t0, EA);
1008 set_fpr(rD(ctx->opcode), t0);
1009 tcg_gen_addi_tl(EA, EA, 8);
1010 gen_qemu_ld64_i64(ctx, t0, EA);
1011 set_fpr(rD(ctx->opcode) + 1, t0);
1014 tcg_temp_free_i64(t0);
/* lfdpx: indexed form of lfdp — same pair-swap logic, EA = rA|0 + rB. */
1018 static void gen_lfdpx(DisasContext *ctx)
1022 if (unlikely(!ctx->fpu_enabled)) {
1023 gen_exception(ctx, POWERPC_EXCP_FPU);
1026 gen_set_access_type(ctx, ACCESS_FLOAT);
1027 EA = tcg_temp_new();
1028 gen_addr_reg_index(ctx, EA);
1029 t0 = tcg_temp_new_i64();
1031 * We only need to swap high and low halves. gen_qemu_ld64_i64
1032 * does necessary 64-bit byteswap already.
1034 if (unlikely(ctx->le_mode)) {
1035 gen_qemu_ld64_i64(ctx, t0, EA);
1036 set_fpr(rD(ctx->opcode) + 1, t0);
1037 tcg_gen_addi_tl(EA, EA, 8);
1038 gen_qemu_ld64_i64(ctx, t0, EA);
1039 set_fpr(rD(ctx->opcode), t0);
1041 gen_qemu_ld64_i64(ctx, t0, EA);
1042 set_fpr(rD(ctx->opcode), t0);
1043 tcg_gen_addi_tl(EA, EA, 8);
1044 gen_qemu_ld64_i64(ctx, t0, EA);
1045 set_fpr(rD(ctx->opcode) + 1, t0);
1048 tcg_temp_free_i64(t0);
/* lfiwax: load 32-bit word, sign-extend to 64 bits into FPR rD. */
1052 static void gen_lfiwax(DisasContext *ctx)
1057 if (unlikely(!ctx->fpu_enabled)) {
1058 gen_exception(ctx, POWERPC_EXCP_FPU);
1061 gen_set_access_type(ctx, ACCESS_FLOAT);
1062 EA = tcg_temp_new();
1063 t0 = tcg_temp_new();
1064 t1 = tcg_temp_new_i64();
1065 gen_addr_reg_index(ctx, EA);
1066 gen_qemu_ld32s(ctx, t0, EA);
1067 tcg_gen_ext_tl_i64(t1, t0);
1068 set_fpr(rD(ctx->opcode), t1);
1071 tcg_temp_free_i64(t1);
/* lfiwzx: load 32-bit word, zero-extend to 64 bits into FPR rD. */
1075 static void gen_lfiwzx(DisasContext *ctx)
1079 if (unlikely(!ctx->fpu_enabled)) {
1080 gen_exception(ctx, POWERPC_EXCP_FPU);
1083 gen_set_access_type(ctx, ACCESS_FLOAT);
1084 EA = tcg_temp_new();
1085 t0 = tcg_temp_new_i64();
1086 gen_addr_reg_index(ctx, EA);
1087 gen_qemu_ld32u_i64(ctx, t0, EA);
1088 set_fpr(rD(ctx->opcode), t0);
1090 tcg_temp_free_i64(t0);
1092 /*** Floating-point store ***/
/* FP store, D-form addressing: store FPR rS to EA = rA|0 + SIMM. */
1093 #define GEN_STF(name, stop, opc, type) \
1094 static void glue(gen_, name)(DisasContext *ctx) \
1098 if (unlikely(!ctx->fpu_enabled)) { \
1099 gen_exception(ctx, POWERPC_EXCP_FPU); \
1102 gen_set_access_type(ctx, ACCESS_FLOAT); \
1103 EA = tcg_temp_new(); \
1104 t0 = tcg_temp_new_i64(); \
1105 gen_addr_imm_index(ctx, EA, 0); \
1106 get_fpr(t0, rS(ctx->opcode)); \
1107 gen_qemu_##stop(ctx, t0, EA); \
1108 tcg_temp_free(EA); \
1109 tcg_temp_free_i64(t0); \
/* FP store with update (D-form): rA must be nonzero; EA written back to rA. */
1112 #define GEN_STUF(name, stop, opc, type) \
1113 static void glue(gen_, name##u)(DisasContext *ctx) \
1117 if (unlikely(!ctx->fpu_enabled)) { \
1118 gen_exception(ctx, POWERPC_EXCP_FPU); \
1121 if (unlikely(rA(ctx->opcode) == 0)) { \
1122 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
1125 gen_set_access_type(ctx, ACCESS_FLOAT); \
1126 EA = tcg_temp_new(); \
1127 t0 = tcg_temp_new_i64(); \
1128 gen_addr_imm_index(ctx, EA, 0); \
1129 get_fpr(t0, rS(ctx->opcode)); \
1130 gen_qemu_##stop(ctx, t0, EA); \
1131 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
1132 tcg_temp_free(EA); \
1133 tcg_temp_free_i64(t0); \
/* FP store with update, X-form: EA = rA + rB, written back to rA. */
1136 #define GEN_STUXF(name, stop, opc, type) \
1137 static void glue(gen_, name##ux)(DisasContext *ctx) \
1141 if (unlikely(!ctx->fpu_enabled)) { \
1142 gen_exception(ctx, POWERPC_EXCP_FPU); \
1145 if (unlikely(rA(ctx->opcode) == 0)) { \
1146 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
1149 gen_set_access_type(ctx, ACCESS_FLOAT); \
1150 EA = tcg_temp_new(); \
1151 t0 = tcg_temp_new_i64(); \
1152 gen_addr_reg_index(ctx, EA); \
1153 get_fpr(t0, rS(ctx->opcode)); \
1154 gen_qemu_##stop(ctx, t0, EA); \
1155 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
1156 tcg_temp_free(EA); \
1157 tcg_temp_free_i64(t0); \
/* FP store, X-form addressing: EA = rA|0 + rB. */
1160 #define GEN_STXF(name, stop, opc2, opc3, type) \
1161 static void glue(gen_, name##x)(DisasContext *ctx) \
1165 if (unlikely(!ctx->fpu_enabled)) { \
1166 gen_exception(ctx, POWERPC_EXCP_FPU); \
1169 gen_set_access_type(ctx, ACCESS_FLOAT); \
1170 EA = tcg_temp_new(); \
1171 t0 = tcg_temp_new_i64(); \
1172 gen_addr_reg_index(ctx, EA); \
1173 get_fpr(t0, rS(ctx->opcode)); \
1174 gen_qemu_##stop(ctx, t0, EA); \
1175 tcg_temp_free(EA); \
1176 tcg_temp_free_i64(t0); \
/* Instantiate the full store family: plain, update, update-indexed, indexed. */
1179 #define GEN_STFS(name, stop, op, type) \
1180 GEN_STF(name, stop, op | 0x20, type); \
1181 GEN_STUF(name, stop, op | 0x21, type); \
1182 GEN_STUXF(name, stop, op | 0x01, type); \
1183 GEN_STXF(name, stop, 0x17, op | 0x00, type)
/* Narrow the FPR's double representation to a 32-bit float and store it. */
1185 static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
1187 TCGv_i32 tmp = tcg_temp_new_i32();
1188 gen_helper_tosingle(tmp, src);
1189 tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
1190 tcg_temp_free_i32(tmp);
1193 /* stfd stfdu stfdux stfdx */
1194 GEN_STFS(stfd, st64_i64, 0x16, PPC_FLOAT);
1195 /* stfs stfsu stfsux stfsx */
1196 GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
1198 /* stfdepx (external PID lfdx) */
/* Indexed FP store through the external-PID TLB index. */
1199 static void gen_stfdepx(DisasContext *ctx)
1204 if (unlikely(!ctx->fpu_enabled)) {
1205 gen_exception(ctx, POWERPC_EXCP_FPU);
1208 gen_set_access_type(ctx, ACCESS_FLOAT);
1209 EA = tcg_temp_new();
1210 t0 = tcg_temp_new_i64();
1211 gen_addr_reg_index(ctx, EA);
1212 get_fpr(t0, rD(ctx->opcode));
1213 tcg_gen_qemu_st_i64(t0, EA, PPC_TLB_EPID_STORE, DEF_MEMOP(MO_Q));
1215 tcg_temp_free_i64(t0);
/*
 * stfdp: store an FP register pair (rD, rD+1) to EA and EA+8, with the
 * doublewords swapped in little-endian mode (per-doubleword byteswap
 * is done by gen_qemu_st64_i64).
 */
1219 static void gen_stfdp(DisasContext *ctx)
1223 if (unlikely(!ctx->fpu_enabled)) {
1224 gen_exception(ctx, POWERPC_EXCP_FPU);
1227 gen_set_access_type(ctx, ACCESS_FLOAT);
1228 EA = tcg_temp_new();
1229 t0 = tcg_temp_new_i64();
1230 gen_addr_imm_index(ctx, EA, 0);
1232 * We only need to swap high and low halves. gen_qemu_st64_i64
1233 * does necessary 64-bit byteswap already.
1235 if (unlikely(ctx->le_mode)) {
1236 get_fpr(t0, rD(ctx->opcode) + 1);
1237 gen_qemu_st64_i64(ctx, t0, EA);
1238 tcg_gen_addi_tl(EA, EA, 8);
1239 get_fpr(t0, rD(ctx->opcode));
1240 gen_qemu_st64_i64(ctx, t0, EA);
1242 get_fpr(t0, rD(ctx->opcode));
1243 gen_qemu_st64_i64(ctx, t0, EA);
1244 tcg_gen_addi_tl(EA, EA, 8);
1245 get_fpr(t0, rD(ctx->opcode) + 1);
1246 gen_qemu_st64_i64(ctx, t0, EA);
1249 tcg_temp_free_i64(t0);
/* stfdpx: indexed form of stfdp — same pair-swap logic, EA = rA|0 + rB. */
1253 static void gen_stfdpx(DisasContext *ctx)
1257 if (unlikely(!ctx->fpu_enabled)) {
1258 gen_exception(ctx, POWERPC_EXCP_FPU);
1261 gen_set_access_type(ctx, ACCESS_FLOAT);
1262 EA = tcg_temp_new();
1263 t0 = tcg_temp_new_i64();
1264 gen_addr_reg_index(ctx, EA);
1266 * We only need to swap high and low halves. gen_qemu_st64_i64
1267 * does necessary 64-bit byteswap already.
1269 if (unlikely(ctx->le_mode)) {
1270 get_fpr(t0, rD(ctx->opcode) + 1);
1271 gen_qemu_st64_i64(ctx, t0, EA);
1272 tcg_gen_addi_tl(EA, EA, 8);
1273 get_fpr(t0, rD(ctx->opcode));
1274 gen_qemu_st64_i64(ctx, t0, EA);
1276 get_fpr(t0, rD(ctx->opcode));
1277 gen_qemu_st64_i64(ctx, t0, EA);
1278 tcg_gen_addi_tl(EA, EA, 8);
1279 get_fpr(t0, rD(ctx->opcode) + 1);
1280 gen_qemu_st64_i64(ctx, t0, EA);
1283 tcg_temp_free_i64(t0);
1287 static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
1289 TCGv t0 = tcg_temp_new();
1290 tcg_gen_trunc_i64_tl(t0, arg1),
1291 gen_qemu_st32(ctx, t0, arg2);
1295 GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
1297 /* POWER2 specific instructions */
1298 /* Quad manipulation (load/store two floats at a time) */
/* lfq (POWER2): load two consecutive FPRs (rd, (rd+1)%32) from EA, EA+8. */
1301 static void gen_lfq(DisasContext *ctx)
1303 int rd = rD(ctx->opcode);
1306 gen_set_access_type(ctx, ACCESS_FLOAT);
1307 t0 = tcg_temp_new();
1308 t1 = tcg_temp_new_i64();
1309 gen_addr_imm_index(ctx, t0, 0);
1310 gen_qemu_ld64_i64(ctx, t1, t0);
1312 gen_addr_add(ctx, t0, t0, 8);
1313 gen_qemu_ld64_i64(ctx, t1, t0);
1314 set_fpr((rd + 1) % 32, t1);
1316 tcg_temp_free_i64(t1);
/* lfqu: lfq with update — the computed base EA is written back to rA. */
1320 static void gen_lfqu(DisasContext *ctx)
1322 int ra = rA(ctx->opcode);
1323 int rd = rD(ctx->opcode);
1326 gen_set_access_type(ctx, ACCESS_FLOAT);
1327 t0 = tcg_temp_new();
1328 t1 = tcg_temp_new();
1329 t2 = tcg_temp_new_i64();
1330 gen_addr_imm_index(ctx, t0, 0);
1331 gen_qemu_ld64_i64(ctx, t2, t0);
1333 gen_addr_add(ctx, t1, t0, 8);
1334 gen_qemu_ld64_i64(ctx, t2, t1);
1335 set_fpr((rd + 1) % 32, t2);
1337 tcg_gen_mov_tl(cpu_gpr[ra], t0);
1341 tcg_temp_free_i64(t2);
/* lfqux: indexed lfq with update (EA = rA + rB, written back to rA). */
1345 static void gen_lfqux(DisasContext *ctx)
1347 int ra = rA(ctx->opcode);
1348 int rd = rD(ctx->opcode);
1349 gen_set_access_type(ctx, ACCESS_FLOAT);
1352 t2 = tcg_temp_new_i64();
1353 t0 = tcg_temp_new();
1354 gen_addr_reg_index(ctx, t0);
1355 gen_qemu_ld64_i64(ctx, t2, t0);
1357 t1 = tcg_temp_new();
1358 gen_addr_add(ctx, t1, t0, 8);
1359 gen_qemu_ld64_i64(ctx, t2, t1);
1360 set_fpr((rd + 1) % 32, t2);
1363 tcg_gen_mov_tl(cpu_gpr[ra], t0);
1366 tcg_temp_free_i64(t2);
/* lfqx: indexed lfq (EA = rA|0 + rB, no update). */
1370 static void gen_lfqx(DisasContext *ctx)
1372 int rd = rD(ctx->opcode);
1375 gen_set_access_type(ctx, ACCESS_FLOAT);
1376 t0 = tcg_temp_new();
1377 t1 = tcg_temp_new_i64();
1378 gen_addr_reg_index(ctx, t0);
1379 gen_qemu_ld64_i64(ctx, t1, t0);
1381 gen_addr_add(ctx, t0, t0, 8);
1382 gen_qemu_ld64_i64(ctx, t1, t0);
1383 set_fpr((rd + 1) % 32, t1);
1385 tcg_temp_free_i64(t1);
/* stfq (POWER2): store FPR pair (rd, (rd+1)%32) to EA, EA+8. */
1389 static void gen_stfq(DisasContext *ctx)
1391 int rd = rD(ctx->opcode);
1394 gen_set_access_type(ctx, ACCESS_FLOAT);
1395 t0 = tcg_temp_new();
1396 t1 = tcg_temp_new_i64();
1397 gen_addr_imm_index(ctx, t0, 0);
1399 gen_qemu_st64_i64(ctx, t1, t0);
1400 gen_addr_add(ctx, t0, t0, 8);
1401 get_fpr(t1, (rd + 1) % 32);
1402 gen_qemu_st64_i64(ctx, t1, t0);
1404 tcg_temp_free_i64(t1);
/* stfqu: stfq with update — base EA written back to rA. */
1408 static void gen_stfqu(DisasContext *ctx)
1410 int ra = rA(ctx->opcode);
1411 int rd = rD(ctx->opcode);
1414 gen_set_access_type(ctx, ACCESS_FLOAT);
1415 t2 = tcg_temp_new_i64();
1416 t0 = tcg_temp_new();
1417 gen_addr_imm_index(ctx, t0, 0);
1419 gen_qemu_st64_i64(ctx, t2, t0);
1420 t1 = tcg_temp_new();
1421 gen_addr_add(ctx, t1, t0, 8);
1422 get_fpr(t2, (rd + 1) % 32);
1423 gen_qemu_st64_i64(ctx, t2, t1);
1426 tcg_gen_mov_tl(cpu_gpr[ra], t0);
1429 tcg_temp_free_i64(t2);
/* stfqux: indexed stfq with update (EA = rA + rB, written back to rA). */
1433 static void gen_stfqux(DisasContext *ctx)
1435 int ra = rA(ctx->opcode);
1436 int rd = rD(ctx->opcode);
1439 gen_set_access_type(ctx, ACCESS_FLOAT);
1440 t2 = tcg_temp_new_i64();
1441 t0 = tcg_temp_new();
1442 gen_addr_reg_index(ctx, t0);
1444 gen_qemu_st64_i64(ctx, t2, t0);
1445 t1 = tcg_temp_new();
1446 gen_addr_add(ctx, t1, t0, 8);
1447 get_fpr(t2, (rd + 1) % 32);
1448 gen_qemu_st64_i64(ctx, t2, t1);
1451 tcg_gen_mov_tl(cpu_gpr[ra], t0);
1454 tcg_temp_free_i64(t2);
/* stfqx: indexed stfq (EA = rA|0 + rB, no update). */
1458 static void gen_stfqx(DisasContext *ctx)
1460 int rd = rD(ctx->opcode);
1463 gen_set_access_type(ctx, ACCESS_FLOAT);
1464 t1 = tcg_temp_new_i64();
1465 t0 = tcg_temp_new();
1466 gen_addr_reg_index(ctx, t0);
1468 gen_qemu_st64_i64(ctx, t1, t0);
1469 gen_addr_add(ctx, t0, t0, 8);
1470 get_fpr(t1, (rd + 1) % 32);
1471 gen_qemu_st64_i64(ctx, t1, t0);
1473 tcg_temp_free_i64(t1);
/* Tear down the local code-generation macros so they don't leak
 * into other translation units that include this file. */
1476 #undef _GEN_FLOAT_ACB
1477 #undef GEN_FLOAT_ACB
1478 #undef _GEN_FLOAT_AB
1480 #undef _GEN_FLOAT_AC