/* Standard FPU translation */
7 static inline void gen_reset_fpstatus(void)
9 gen_helper_reset_fpstatus(cpu_env
);
12 static inline void gen_compute_fprf_float64(TCGv_i64 arg
)
14 gen_helper_compute_fprf_float64(cpu_env
, arg
);
15 gen_helper_float_check_status(cpu_env
);
18 #if defined(TARGET_PPC64)
19 static void gen_set_cr1_from_fpscr(DisasContext
*ctx
)
21 TCGv_i32 tmp
= tcg_temp_new_i32();
22 tcg_gen_trunc_tl_i32(tmp
, cpu_fpscr
);
23 tcg_gen_shri_i32(cpu_crf
[1], tmp
, 28);
24 tcg_temp_free_i32(tmp
);
27 static void gen_set_cr1_from_fpscr(DisasContext
*ctx
)
29 tcg_gen_shri_tl(cpu_crf
[1], cpu_fpscr
, 28);
/*** Floating-Point arithmetic ***/
/* Three-operand (A, C, B) FP op; isfloat selects single-precision rounding. */
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type)           \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    TCGv_i64 t0;                                                              \
    TCGv_i64 t1;                                                              \
    TCGv_i64 t2;                                                              \
    TCGv_i64 t3;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    t2 = tcg_temp_new_i64();                                                  \
    t3 = tcg_temp_new_i64();                                                  \
    gen_reset_fpstatus();                                                     \
    get_fpr(t0, rA(ctx->opcode));                                             \
    get_fpr(t1, rC(ctx->opcode));                                             \
    get_fpr(t2, rB(ctx->opcode));                                             \
    gen_helper_f##op(t3, cpu_env, t0, t1, t2);                                \
    if (isfloat) {                                                            \
        gen_helper_frsp(t3, cpu_env, t3);                                     \
    }                                                                         \
    set_fpr(rD(ctx->opcode), t3);                                             \
    if (set_fprf) {                                                           \
        gen_compute_fprf_float64(t3);                                         \
    }                                                                         \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_cr1_from_fpscr(ctx);                                          \
    }                                                                         \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
    tcg_temp_free_i64(t2);                                                    \
    tcg_temp_free_i64(t3);                                                    \
}

/* Emit both the double-precision and the single-precision variants. */
#define GEN_FLOAT_ACB(name, op2, set_fprf, type)                              \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type);                     \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
/* Two-operand (A, B) FP op; isfloat selects single-precision rounding. */
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    TCGv_i64 t0;                                                              \
    TCGv_i64 t1;                                                              \
    TCGv_i64 t2;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    t2 = tcg_temp_new_i64();                                                  \
    gen_reset_fpstatus();                                                     \
    get_fpr(t0, rA(ctx->opcode));                                             \
    get_fpr(t1, rB(ctx->opcode));                                             \
    gen_helper_f##op(t2, cpu_env, t0, t1);                                    \
    if (isfloat) {                                                            \
        gen_helper_frsp(t2, cpu_env, t2);                                     \
    }                                                                         \
    set_fpr(rD(ctx->opcode), t2);                                             \
    if (set_fprf) {                                                           \
        gen_compute_fprf_float64(t2);                                         \
    }                                                                         \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_cr1_from_fpscr(ctx);                                          \
    }                                                                         \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
    tcg_temp_free_i64(t2);                                                    \
}

#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/* Two-operand (A, C) FP op (e.g. fmul); isfloat selects single precision. */
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    TCGv_i64 t0;                                                              \
    TCGv_i64 t1;                                                              \
    TCGv_i64 t2;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    t2 = tcg_temp_new_i64();                                                  \
    gen_reset_fpstatus();                                                     \
    get_fpr(t0, rA(ctx->opcode));                                             \
    get_fpr(t1, rC(ctx->opcode));                                             \
    gen_helper_f##op(t2, cpu_env, t0, t1);                                    \
    if (isfloat) {                                                            \
        gen_helper_frsp(t2, cpu_env, t2);                                     \
    }                                                                         \
    set_fpr(rD(ctx->opcode), t2);                                             \
    if (set_fprf) {                                                           \
        gen_compute_fprf_float64(t2);                                         \
    }                                                                         \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_cr1_from_fpscr(ctx);                                          \
    }                                                                         \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
    tcg_temp_free_i64(t2);                                                    \
}

#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
/* One-operand (B) FP op: conversions and rounds. */
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type)                           \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    TCGv_i64 t0;                                                              \
    TCGv_i64 t1;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_reset_fpstatus();                                                     \
    get_fpr(t0, rB(ctx->opcode));                                             \
    gen_helper_f##name(t1, cpu_env, t0);                                      \
    set_fpr(rD(ctx->opcode), t1);                                             \
    if (set_fprf) {                                                           \
        gen_compute_fprf_float64(t1);                                         \
    }                                                                         \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_cr1_from_fpscr(ctx);                                          \
    }                                                                         \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
/* One-operand (B) FP estimate op (fre/fres/frsqrte). */
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type)                          \
static void gen_f##name(DisasContext *ctx)                                    \
{                                                                             \
    TCGv_i64 t0;                                                              \
    TCGv_i64 t1;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    gen_reset_fpstatus();                                                     \
    get_fpr(t0, rB(ctx->opcode));                                             \
    gen_helper_f##name(t1, cpu_env, t0);                                      \
    set_fpr(rD(ctx->opcode), t1);                                             \
    if (set_fprf) {                                                           \
        gen_compute_fprf_float64(t1);                                         \
    }                                                                         \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_cr1_from_fpscr(ctx);                                          \
    }                                                                         \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}
195 GEN_FLOAT_AB(add
, 0x15, 0x000007C0, 1, PPC_FLOAT
);
197 GEN_FLOAT_AB(div
, 0x12, 0x000007C0, 1, PPC_FLOAT
);
199 GEN_FLOAT_AC(mul
, 0x19, 0x0000F800, 1, PPC_FLOAT
);
202 GEN_FLOAT_BS(re
, 0x3F, 0x18, 1, PPC_FLOAT_EXT
);
205 GEN_FLOAT_BS(res
, 0x3B, 0x18, 1, PPC_FLOAT_FRES
);
208 GEN_FLOAT_BS(rsqrte
, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE
);
211 static void gen_frsqrtes(DisasContext
*ctx
)
215 if (unlikely(!ctx
->fpu_enabled
)) {
216 gen_exception(ctx
, POWERPC_EXCP_FPU
);
219 t0
= tcg_temp_new_i64();
220 t1
= tcg_temp_new_i64();
221 gen_reset_fpstatus();
222 get_fpr(t0
, rB(ctx
->opcode
));
223 gen_helper_frsqrte(t1
, cpu_env
, t0
);
224 gen_helper_frsp(t1
, cpu_env
, t1
);
225 set_fpr(rD(ctx
->opcode
), t1
);
226 gen_compute_fprf_float64(t1
);
227 if (unlikely(Rc(ctx
->opcode
) != 0)) {
228 gen_set_cr1_from_fpscr(ctx
);
230 tcg_temp_free_i64(t0
);
231 tcg_temp_free_i64(t1
);
235 _GEN_FLOAT_ACB(sel
, sel
, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL
);
237 GEN_FLOAT_AB(sub
, 0x14, 0x000007C0, 1, PPC_FLOAT
);
241 static void gen_fsqrt(DisasContext
*ctx
)
245 if (unlikely(!ctx
->fpu_enabled
)) {
246 gen_exception(ctx
, POWERPC_EXCP_FPU
);
249 t0
= tcg_temp_new_i64();
250 t1
= tcg_temp_new_i64();
251 gen_reset_fpstatus();
252 get_fpr(t0
, rB(ctx
->opcode
));
253 gen_helper_fsqrt(t1
, cpu_env
, t0
);
254 set_fpr(rD(ctx
->opcode
), t1
);
255 gen_compute_fprf_float64(t1
);
256 if (unlikely(Rc(ctx
->opcode
) != 0)) {
257 gen_set_cr1_from_fpscr(ctx
);
259 tcg_temp_free_i64(t0
);
260 tcg_temp_free_i64(t1
);
263 static void gen_fsqrts(DisasContext
*ctx
)
267 if (unlikely(!ctx
->fpu_enabled
)) {
268 gen_exception(ctx
, POWERPC_EXCP_FPU
);
271 t0
= tcg_temp_new_i64();
272 t1
= tcg_temp_new_i64();
273 gen_reset_fpstatus();
274 get_fpr(t0
, rB(ctx
->opcode
));
275 gen_helper_fsqrt(t1
, cpu_env
, t0
);
276 gen_helper_frsp(t1
, cpu_env
, t1
);
277 set_fpr(rD(ctx
->opcode
), t1
);
278 gen_compute_fprf_float64(t1
);
279 if (unlikely(Rc(ctx
->opcode
) != 0)) {
280 gen_set_cr1_from_fpscr(ctx
);
282 tcg_temp_free_i64(t0
);
283 tcg_temp_free_i64(t1
);
286 /*** Floating-Point multiply-and-add ***/
288 GEN_FLOAT_ACB(madd
, 0x1D, 1, PPC_FLOAT
);
290 GEN_FLOAT_ACB(msub
, 0x1C, 1, PPC_FLOAT
);
291 /* fnmadd - fnmadds */
292 GEN_FLOAT_ACB(nmadd
, 0x1F, 1, PPC_FLOAT
);
293 /* fnmsub - fnmsubs */
294 GEN_FLOAT_ACB(nmsub
, 0x1E, 1, PPC_FLOAT
);
296 /*** Floating-Point round & convert ***/
298 GEN_FLOAT_B(ctiw
, 0x0E, 0x00, 0, PPC_FLOAT
);
300 GEN_FLOAT_B(ctiwu
, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206
);
302 GEN_FLOAT_B(ctiwz
, 0x0F, 0x00, 0, PPC_FLOAT
);
304 GEN_FLOAT_B(ctiwuz
, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206
);
306 GEN_FLOAT_B(rsp
, 0x0C, 0x00, 1, PPC_FLOAT
);
308 GEN_FLOAT_B(cfid
, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64
);
310 GEN_FLOAT_B(cfids
, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206
);
312 GEN_FLOAT_B(cfidu
, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206
);
314 GEN_FLOAT_B(cfidus
, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206
);
316 GEN_FLOAT_B(ctid
, 0x0E, 0x19, 0, PPC2_FP_CVT_S64
);
318 GEN_FLOAT_B(ctidu
, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206
);
320 GEN_FLOAT_B(ctidz
, 0x0F, 0x19, 0, PPC2_FP_CVT_S64
);
322 GEN_FLOAT_B(ctiduz
, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206
);
325 GEN_FLOAT_B(rin
, 0x08, 0x0C, 1, PPC_FLOAT_EXT
);
327 GEN_FLOAT_B(riz
, 0x08, 0x0D, 1, PPC_FLOAT_EXT
);
329 GEN_FLOAT_B(rip
, 0x08, 0x0E, 1, PPC_FLOAT_EXT
);
331 GEN_FLOAT_B(rim
, 0x08, 0x0F, 1, PPC_FLOAT_EXT
);
333 static void gen_ftdiv(DisasContext
*ctx
)
337 if (unlikely(!ctx
->fpu_enabled
)) {
338 gen_exception(ctx
, POWERPC_EXCP_FPU
);
341 t0
= tcg_temp_new_i64();
342 t1
= tcg_temp_new_i64();
343 get_fpr(t0
, rA(ctx
->opcode
));
344 get_fpr(t1
, rB(ctx
->opcode
));
345 gen_helper_ftdiv(cpu_crf
[crfD(ctx
->opcode
)], t0
, t1
);
346 tcg_temp_free_i64(t0
);
347 tcg_temp_free_i64(t1
);
350 static void gen_ftsqrt(DisasContext
*ctx
)
353 if (unlikely(!ctx
->fpu_enabled
)) {
354 gen_exception(ctx
, POWERPC_EXCP_FPU
);
357 t0
= tcg_temp_new_i64();
358 get_fpr(t0
, rB(ctx
->opcode
));
359 gen_helper_ftsqrt(cpu_crf
[crfD(ctx
->opcode
)], t0
);
360 tcg_temp_free_i64(t0
);
365 /*** Floating-Point compare ***/
368 static void gen_fcmpo(DisasContext
*ctx
)
373 if (unlikely(!ctx
->fpu_enabled
)) {
374 gen_exception(ctx
, POWERPC_EXCP_FPU
);
377 t0
= tcg_temp_new_i64();
378 t1
= tcg_temp_new_i64();
379 gen_reset_fpstatus();
380 crf
= tcg_const_i32(crfD(ctx
->opcode
));
381 get_fpr(t0
, rA(ctx
->opcode
));
382 get_fpr(t1
, rB(ctx
->opcode
));
383 gen_helper_fcmpo(cpu_env
, t0
, t1
, crf
);
384 tcg_temp_free_i32(crf
);
385 gen_helper_float_check_status(cpu_env
);
386 tcg_temp_free_i64(t0
);
387 tcg_temp_free_i64(t1
);
391 static void gen_fcmpu(DisasContext
*ctx
)
396 if (unlikely(!ctx
->fpu_enabled
)) {
397 gen_exception(ctx
, POWERPC_EXCP_FPU
);
400 t0
= tcg_temp_new_i64();
401 t1
= tcg_temp_new_i64();
402 gen_reset_fpstatus();
403 crf
= tcg_const_i32(crfD(ctx
->opcode
));
404 get_fpr(t0
, rA(ctx
->opcode
));
405 get_fpr(t1
, rB(ctx
->opcode
));
406 gen_helper_fcmpu(cpu_env
, t0
, t1
, crf
);
407 tcg_temp_free_i32(crf
);
408 gen_helper_float_check_status(cpu_env
);
409 tcg_temp_free_i64(t0
);
410 tcg_temp_free_i64(t1
);
413 /*** Floating-point move ***/
415 /* XXX: beware that fabs never checks for NaNs nor update FPSCR */
416 static void gen_fabs(DisasContext
*ctx
)
420 if (unlikely(!ctx
->fpu_enabled
)) {
421 gen_exception(ctx
, POWERPC_EXCP_FPU
);
424 t0
= tcg_temp_new_i64();
425 t1
= tcg_temp_new_i64();
426 get_fpr(t0
, rB(ctx
->opcode
));
427 tcg_gen_andi_i64(t1
, t0
, ~(1ULL << 63));
428 set_fpr(rD(ctx
->opcode
), t1
);
429 if (unlikely(Rc(ctx
->opcode
))) {
430 gen_set_cr1_from_fpscr(ctx
);
432 tcg_temp_free_i64(t0
);
433 tcg_temp_free_i64(t1
);
437 /* XXX: beware that fmr never checks for NaNs nor update FPSCR */
438 static void gen_fmr(DisasContext
*ctx
)
441 if (unlikely(!ctx
->fpu_enabled
)) {
442 gen_exception(ctx
, POWERPC_EXCP_FPU
);
445 t0
= tcg_temp_new_i64();
446 get_fpr(t0
, rB(ctx
->opcode
));
447 set_fpr(rD(ctx
->opcode
), t0
);
448 if (unlikely(Rc(ctx
->opcode
))) {
449 gen_set_cr1_from_fpscr(ctx
);
451 tcg_temp_free_i64(t0
);
455 /* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
456 static void gen_fnabs(DisasContext
*ctx
)
460 if (unlikely(!ctx
->fpu_enabled
)) {
461 gen_exception(ctx
, POWERPC_EXCP_FPU
);
464 t0
= tcg_temp_new_i64();
465 t1
= tcg_temp_new_i64();
466 get_fpr(t0
, rB(ctx
->opcode
));
467 tcg_gen_ori_i64(t1
, t0
, 1ULL << 63);
468 set_fpr(rD(ctx
->opcode
), t1
);
469 if (unlikely(Rc(ctx
->opcode
))) {
470 gen_set_cr1_from_fpscr(ctx
);
472 tcg_temp_free_i64(t0
);
473 tcg_temp_free_i64(t1
);
477 /* XXX: beware that fneg never checks for NaNs nor update FPSCR */
478 static void gen_fneg(DisasContext
*ctx
)
482 if (unlikely(!ctx
->fpu_enabled
)) {
483 gen_exception(ctx
, POWERPC_EXCP_FPU
);
486 t0
= tcg_temp_new_i64();
487 t1
= tcg_temp_new_i64();
488 get_fpr(t0
, rB(ctx
->opcode
));
489 tcg_gen_xori_i64(t1
, t0
, 1ULL << 63);
490 set_fpr(rD(ctx
->opcode
), t1
);
491 if (unlikely(Rc(ctx
->opcode
))) {
492 gen_set_cr1_from_fpscr(ctx
);
494 tcg_temp_free_i64(t0
);
495 tcg_temp_free_i64(t1
);
498 /* fcpsgn: PowerPC 2.05 specification */
499 /* XXX: beware that fcpsgn never checks for NaNs nor update FPSCR */
500 static void gen_fcpsgn(DisasContext
*ctx
)
505 if (unlikely(!ctx
->fpu_enabled
)) {
506 gen_exception(ctx
, POWERPC_EXCP_FPU
);
509 t0
= tcg_temp_new_i64();
510 t1
= tcg_temp_new_i64();
511 t2
= tcg_temp_new_i64();
512 get_fpr(t0
, rA(ctx
->opcode
));
513 get_fpr(t1
, rB(ctx
->opcode
));
514 tcg_gen_deposit_i64(t2
, t0
, t1
, 0, 63);
515 set_fpr(rD(ctx
->opcode
), t2
);
516 if (unlikely(Rc(ctx
->opcode
))) {
517 gen_set_cr1_from_fpscr(ctx
);
519 tcg_temp_free_i64(t0
);
520 tcg_temp_free_i64(t1
);
521 tcg_temp_free_i64(t2
);
524 static void gen_fmrgew(DisasContext
*ctx
)
529 if (unlikely(!ctx
->fpu_enabled
)) {
530 gen_exception(ctx
, POWERPC_EXCP_FPU
);
533 b0
= tcg_temp_new_i64();
534 t0
= tcg_temp_new_i64();
535 t1
= tcg_temp_new_i64();
536 get_fpr(t0
, rB(ctx
->opcode
));
537 tcg_gen_shri_i64(b0
, t0
, 32);
538 get_fpr(t0
, rA(ctx
->opcode
));
539 tcg_gen_deposit_i64(t1
, t0
, b0
, 0, 32);
540 set_fpr(rD(ctx
->opcode
), t1
);
541 tcg_temp_free_i64(b0
);
542 tcg_temp_free_i64(t0
);
543 tcg_temp_free_i64(t1
);
546 static void gen_fmrgow(DisasContext
*ctx
)
551 if (unlikely(!ctx
->fpu_enabled
)) {
552 gen_exception(ctx
, POWERPC_EXCP_FPU
);
555 t0
= tcg_temp_new_i64();
556 t1
= tcg_temp_new_i64();
557 t2
= tcg_temp_new_i64();
558 get_fpr(t0
, rB(ctx
->opcode
));
559 get_fpr(t1
, rA(ctx
->opcode
));
560 tcg_gen_deposit_i64(t2
, t0
, t1
, 32, 32);
561 set_fpr(rD(ctx
->opcode
), t2
);
562 tcg_temp_free_i64(t0
);
563 tcg_temp_free_i64(t1
);
564 tcg_temp_free_i64(t2
);
567 /*** Floating-Point status & ctrl register ***/
570 static void gen_mcrfs(DisasContext
*ctx
)
572 TCGv tmp
= tcg_temp_new();
574 TCGv_i64 tnew_fpscr
= tcg_temp_new_i64();
579 if (unlikely(!ctx
->fpu_enabled
)) {
580 gen_exception(ctx
, POWERPC_EXCP_FPU
);
583 bfa
= crfS(ctx
->opcode
);
586 tcg_gen_shri_tl(tmp
, cpu_fpscr
, shift
);
587 tcg_gen_trunc_tl_i32(cpu_crf
[crfD(ctx
->opcode
)], tmp
);
588 tcg_gen_andi_i32(cpu_crf
[crfD(ctx
->opcode
)], cpu_crf
[crfD(ctx
->opcode
)],
591 tcg_gen_extu_tl_i64(tnew_fpscr
, cpu_fpscr
);
592 /* Only the exception bits (including FX) should be cleared if read */
593 tcg_gen_andi_i64(tnew_fpscr
, tnew_fpscr
,
594 ~((0xF << shift
) & FP_EX_CLEAR_BITS
));
595 /* FEX and VX need to be updated, so don't set fpscr directly */
596 tmask
= tcg_const_i32(1 << nibble
);
597 gen_helper_store_fpscr(cpu_env
, tnew_fpscr
, tmask
);
598 tcg_temp_free_i32(tmask
);
599 tcg_temp_free_i64(tnew_fpscr
);
603 static void gen_mffs(DisasContext
*ctx
)
606 if (unlikely(!ctx
->fpu_enabled
)) {
607 gen_exception(ctx
, POWERPC_EXCP_FPU
);
610 t0
= tcg_temp_new_i64();
611 gen_reset_fpstatus();
612 tcg_gen_extu_tl_i64(t0
, cpu_fpscr
);
613 set_fpr(rD(ctx
->opcode
), t0
);
614 if (unlikely(Rc(ctx
->opcode
))) {
615 gen_set_cr1_from_fpscr(ctx
);
617 tcg_temp_free_i64(t0
);
621 static void gen_mffsl(DisasContext
*ctx
)
625 if (unlikely(!(ctx
->insns_flags2
& PPC2_ISA300
))) {
626 return gen_mffs(ctx
);
629 if (unlikely(!ctx
->fpu_enabled
)) {
630 gen_exception(ctx
, POWERPC_EXCP_FPU
);
633 t0
= tcg_temp_new_i64();
634 gen_reset_fpstatus();
635 tcg_gen_extu_tl_i64(t0
, cpu_fpscr
);
636 /* Mask everything except mode, status, and enables. */
637 tcg_gen_andi_i64(t0
, t0
, FP_DRN
| FP_STATUS
| FP_ENABLES
| FP_RN
);
638 set_fpr(rD(ctx
->opcode
), t0
);
639 tcg_temp_free_i64(t0
);
642 static void gen_helper_mffscrn(DisasContext
*ctx
, TCGv_i64 t1
)
644 TCGv_i64 t0
= tcg_temp_new_i64();
645 TCGv_i32 mask
= tcg_const_i32(0x0001);
647 gen_reset_fpstatus();
648 tcg_gen_extu_tl_i64(t0
, cpu_fpscr
);
649 tcg_gen_andi_i64(t0
, t0
, FP_DRN
| FP_ENABLES
| FP_RN
);
650 set_fpr(rD(ctx
->opcode
), t0
);
652 /* Mask FPSCR value to clear RN. */
653 tcg_gen_andi_i64(t0
, t0
, ~FP_RN
);
655 /* Merge RN into FPSCR value. */
656 tcg_gen_or_i64(t0
, t0
, t1
);
658 gen_helper_store_fpscr(cpu_env
, t0
, mask
);
660 tcg_temp_free_i32(mask
);
661 tcg_temp_free_i64(t0
);
665 static void gen_mffscrn(DisasContext
*ctx
)
669 if (unlikely(!(ctx
->insns_flags2
& PPC2_ISA300
))) {
670 return gen_mffs(ctx
);
673 if (unlikely(!ctx
->fpu_enabled
)) {
674 gen_exception(ctx
, POWERPC_EXCP_FPU
);
678 t1
= tcg_temp_new_i64();
679 get_fpr(t1
, rB(ctx
->opcode
));
680 /* Mask FRB to get just RN. */
681 tcg_gen_andi_i64(t1
, t1
, FP_RN
);
683 gen_helper_mffscrn(ctx
, t1
);
685 tcg_temp_free_i64(t1
);
689 static void gen_mffscrni(DisasContext
*ctx
)
693 if (unlikely(!(ctx
->insns_flags2
& PPC2_ISA300
))) {
694 return gen_mffs(ctx
);
697 if (unlikely(!ctx
->fpu_enabled
)) {
698 gen_exception(ctx
, POWERPC_EXCP_FPU
);
702 t1
= tcg_const_i64((uint64_t)RM(ctx
->opcode
));
704 gen_helper_mffscrn(ctx
, t1
);
706 tcg_temp_free_i64(t1
);
710 static void gen_mtfsb0(DisasContext
*ctx
)
714 if (unlikely(!ctx
->fpu_enabled
)) {
715 gen_exception(ctx
, POWERPC_EXCP_FPU
);
718 crb
= 31 - crbD(ctx
->opcode
);
719 gen_reset_fpstatus();
720 if (likely(crb
!= FPSCR_FEX
&& crb
!= FPSCR_VX
)) {
722 t0
= tcg_const_i32(crb
);
723 gen_helper_fpscr_clrbit(cpu_env
, t0
);
724 tcg_temp_free_i32(t0
);
726 if (unlikely(Rc(ctx
->opcode
) != 0)) {
727 tcg_gen_trunc_tl_i32(cpu_crf
[1], cpu_fpscr
);
728 tcg_gen_shri_i32(cpu_crf
[1], cpu_crf
[1], FPSCR_OX
);
733 static void gen_mtfsb1(DisasContext
*ctx
)
737 if (unlikely(!ctx
->fpu_enabled
)) {
738 gen_exception(ctx
, POWERPC_EXCP_FPU
);
741 crb
= 31 - crbD(ctx
->opcode
);
742 gen_reset_fpstatus();
743 /* XXX: we pretend we can only do IEEE floating-point computations */
744 if (likely(crb
!= FPSCR_FEX
&& crb
!= FPSCR_VX
&& crb
!= FPSCR_NI
)) {
746 t0
= tcg_const_i32(crb
);
747 gen_helper_fpscr_setbit(cpu_env
, t0
);
748 tcg_temp_free_i32(t0
);
750 if (unlikely(Rc(ctx
->opcode
) != 0)) {
751 tcg_gen_trunc_tl_i32(cpu_crf
[1], cpu_fpscr
);
752 tcg_gen_shri_i32(cpu_crf
[1], cpu_crf
[1], FPSCR_OX
);
754 /* We can raise a differed exception */
755 gen_helper_float_check_status(cpu_env
);
759 static void gen_mtfsf(DisasContext
*ctx
)
765 if (unlikely(!ctx
->fpu_enabled
)) {
766 gen_exception(ctx
, POWERPC_EXCP_FPU
);
769 flm
= FPFLM(ctx
->opcode
);
770 l
= FPL(ctx
->opcode
);
771 w
= FPW(ctx
->opcode
);
772 if (unlikely(w
& !(ctx
->insns_flags2
& PPC2_ISA205
))) {
773 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
776 gen_reset_fpstatus();
778 t0
= tcg_const_i32((ctx
->insns_flags2
& PPC2_ISA205
) ? 0xffff : 0xff);
780 t0
= tcg_const_i32(flm
<< (w
* 8));
782 t1
= tcg_temp_new_i64();
783 get_fpr(t1
, rB(ctx
->opcode
));
784 gen_helper_store_fpscr(cpu_env
, t1
, t0
);
785 tcg_temp_free_i32(t0
);
786 if (unlikely(Rc(ctx
->opcode
) != 0)) {
787 tcg_gen_trunc_tl_i32(cpu_crf
[1], cpu_fpscr
);
788 tcg_gen_shri_i32(cpu_crf
[1], cpu_crf
[1], FPSCR_OX
);
790 /* We can raise a differed exception */
791 gen_helper_float_check_status(cpu_env
);
792 tcg_temp_free_i64(t1
);
796 static void gen_mtfsfi(DisasContext
*ctx
)
802 if (unlikely(!ctx
->fpu_enabled
)) {
803 gen_exception(ctx
, POWERPC_EXCP_FPU
);
806 w
= FPW(ctx
->opcode
);
807 bf
= FPBF(ctx
->opcode
);
808 if (unlikely(w
& !(ctx
->insns_flags2
& PPC2_ISA205
))) {
809 gen_inval_exception(ctx
, POWERPC_EXCP_INVAL_INVAL
);
812 sh
= (8 * w
) + 7 - bf
;
813 gen_reset_fpstatus();
814 t0
= tcg_const_i64(((uint64_t)FPIMM(ctx
->opcode
)) << (4 * sh
));
815 t1
= tcg_const_i32(1 << sh
);
816 gen_helper_store_fpscr(cpu_env
, t0
, t1
);
817 tcg_temp_free_i64(t0
);
818 tcg_temp_free_i32(t1
);
819 if (unlikely(Rc(ctx
->opcode
) != 0)) {
820 tcg_gen_trunc_tl_i32(cpu_crf
[1], cpu_fpscr
);
821 tcg_gen_shri_i32(cpu_crf
[1], cpu_crf
[1], FPSCR_OX
);
823 /* We can raise a differed exception */
824 gen_helper_float_check_status(cpu_env
);
/*** Floating-point load ***/
#define GEN_LDF(name, ldop, opc, type)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    TCGv_i64 t0;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    t0 = tcg_temp_new_i64();                                                  \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##ldop(ctx, t0, EA);                                             \
    set_fpr(rD(ctx->opcode), t0);                                             \
    tcg_temp_free(EA);                                                        \
    tcg_temp_free_i64(t0);                                                    \
}

#define GEN_LDUF(name, ldop, opc, type)                                       \
static void glue(gen_, name##u)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    TCGv_i64 t0;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    t0 = tcg_temp_new_i64();                                                  \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    gen_qemu_##ldop(ctx, t0, EA);                                             \
    set_fpr(rD(ctx->opcode), t0);                                             \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
    tcg_temp_free_i64(t0);                                                    \
}

#define GEN_LDUXF(name, ldop, opc, type)                                      \
static void glue(gen_, name##ux)(DisasContext *ctx)                           \
{                                                                             \
    TCGv EA;                                                                  \
    TCGv_i64 t0;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, t0, EA);                                             \
    set_fpr(rD(ctx->opcode), t0);                                             \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
    tcg_temp_free_i64(t0);                                                    \
}

#define GEN_LDXF(name, ldop, opc2, opc3, type)                                \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    TCGv_i64 t0;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    t0 = tcg_temp_new_i64();                                                  \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, t0, EA);                                             \
    set_fpr(rD(ctx->opcode), t0);                                             \
    tcg_temp_free(EA);                                                        \
    tcg_temp_free_i64(t0);                                                    \
}

#define GEN_LDFS(name, ldop, op, type)                                        \
GEN_LDF(name, ldop, op | 0x20, type);                                         \
GEN_LDUF(name, ldop, op | 0x21, type);                                        \
GEN_LDUXF(name, ldop, op | 0x01, type);                                       \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)
920 static void gen_qemu_ld32fs(DisasContext
*ctx
, TCGv_i64 dest
, TCGv addr
)
922 TCGv_i32 tmp
= tcg_temp_new_i32();
923 tcg_gen_qemu_ld_i32(tmp
, addr
, ctx
->mem_idx
, DEF_MEMOP(MO_UL
));
924 gen_helper_todouble(dest
, tmp
);
925 tcg_temp_free_i32(tmp
);
928 /* lfd lfdu lfdux lfdx */
929 GEN_LDFS(lfd
, ld64_i64
, 0x12, PPC_FLOAT
);
930 /* lfs lfsu lfsux lfsx */
931 GEN_LDFS(lfs
, ld32fs
, 0x10, PPC_FLOAT
);
933 /* lfdepx (external PID lfdx) */
934 static void gen_lfdepx(DisasContext
*ctx
)
939 if (unlikely(!ctx
->fpu_enabled
)) {
940 gen_exception(ctx
, POWERPC_EXCP_FPU
);
943 gen_set_access_type(ctx
, ACCESS_FLOAT
);
945 t0
= tcg_temp_new_i64();
946 gen_addr_reg_index(ctx
, EA
);
947 tcg_gen_qemu_ld_i64(t0
, EA
, PPC_TLB_EPID_LOAD
, DEF_MEMOP(MO_Q
));
948 set_fpr(rD(ctx
->opcode
), t0
);
950 tcg_temp_free_i64(t0
);
954 static void gen_lfdp(DisasContext
*ctx
)
958 if (unlikely(!ctx
->fpu_enabled
)) {
959 gen_exception(ctx
, POWERPC_EXCP_FPU
);
962 gen_set_access_type(ctx
, ACCESS_FLOAT
);
964 gen_addr_imm_index(ctx
, EA
, 0);
965 t0
= tcg_temp_new_i64();
967 * We only need to swap high and low halves. gen_qemu_ld64_i64
968 * does necessary 64-bit byteswap already.
970 if (unlikely(ctx
->le_mode
)) {
971 gen_qemu_ld64_i64(ctx
, t0
, EA
);
972 set_fpr(rD(ctx
->opcode
) + 1, t0
);
973 tcg_gen_addi_tl(EA
, EA
, 8);
974 gen_qemu_ld64_i64(ctx
, t0
, EA
);
975 set_fpr(rD(ctx
->opcode
), t0
);
977 gen_qemu_ld64_i64(ctx
, t0
, EA
);
978 set_fpr(rD(ctx
->opcode
), t0
);
979 tcg_gen_addi_tl(EA
, EA
, 8);
980 gen_qemu_ld64_i64(ctx
, t0
, EA
);
981 set_fpr(rD(ctx
->opcode
) + 1, t0
);
984 tcg_temp_free_i64(t0
);
988 static void gen_lfdpx(DisasContext
*ctx
)
992 if (unlikely(!ctx
->fpu_enabled
)) {
993 gen_exception(ctx
, POWERPC_EXCP_FPU
);
996 gen_set_access_type(ctx
, ACCESS_FLOAT
);
998 gen_addr_reg_index(ctx
, EA
);
999 t0
= tcg_temp_new_i64();
1001 * We only need to swap high and low halves. gen_qemu_ld64_i64
1002 * does necessary 64-bit byteswap already.
1004 if (unlikely(ctx
->le_mode
)) {
1005 gen_qemu_ld64_i64(ctx
, t0
, EA
);
1006 set_fpr(rD(ctx
->opcode
) + 1, t0
);
1007 tcg_gen_addi_tl(EA
, EA
, 8);
1008 gen_qemu_ld64_i64(ctx
, t0
, EA
);
1009 set_fpr(rD(ctx
->opcode
), t0
);
1011 gen_qemu_ld64_i64(ctx
, t0
, EA
);
1012 set_fpr(rD(ctx
->opcode
), t0
);
1013 tcg_gen_addi_tl(EA
, EA
, 8);
1014 gen_qemu_ld64_i64(ctx
, t0
, EA
);
1015 set_fpr(rD(ctx
->opcode
) + 1, t0
);
1018 tcg_temp_free_i64(t0
);
1022 static void gen_lfiwax(DisasContext
*ctx
)
1027 if (unlikely(!ctx
->fpu_enabled
)) {
1028 gen_exception(ctx
, POWERPC_EXCP_FPU
);
1031 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1032 EA
= tcg_temp_new();
1033 t0
= tcg_temp_new();
1034 t1
= tcg_temp_new_i64();
1035 gen_addr_reg_index(ctx
, EA
);
1036 gen_qemu_ld32s(ctx
, t0
, EA
);
1037 tcg_gen_ext_tl_i64(t1
, t0
);
1038 set_fpr(rD(ctx
->opcode
), t1
);
1041 tcg_temp_free_i64(t1
);
1045 static void gen_lfiwzx(DisasContext
*ctx
)
1049 if (unlikely(!ctx
->fpu_enabled
)) {
1050 gen_exception(ctx
, POWERPC_EXCP_FPU
);
1053 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1054 EA
= tcg_temp_new();
1055 t0
= tcg_temp_new_i64();
1056 gen_addr_reg_index(ctx
, EA
);
1057 gen_qemu_ld32u_i64(ctx
, t0
, EA
);
1058 set_fpr(rD(ctx
->opcode
), t0
);
1060 tcg_temp_free_i64(t0
);
/*** Floating-point store ***/
#define GEN_STF(name, stop, opc, type)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv EA;                                                                  \
    TCGv_i64 t0;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    t0 = tcg_temp_new_i64();                                                  \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    get_fpr(t0, rS(ctx->opcode));                                             \
    gen_qemu_##stop(ctx, t0, EA);                                             \
    tcg_temp_free(EA);                                                        \
    tcg_temp_free_i64(t0);                                                    \
}

#define GEN_STUF(name, stop, opc, type)                                       \
static void glue(gen_, name##u)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    TCGv_i64 t0;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    t0 = tcg_temp_new_i64();                                                  \
    gen_addr_imm_index(ctx, EA, 0);                                           \
    get_fpr(t0, rS(ctx->opcode));                                             \
    gen_qemu_##stop(ctx, t0, EA);                                             \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
    tcg_temp_free_i64(t0);                                                    \
}

#define GEN_STUXF(name, stop, opc, type)                                      \
static void glue(gen_, name##ux)(DisasContext *ctx)                           \
{                                                                             \
    TCGv EA;                                                                  \
    TCGv_i64 t0;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);                   \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    t0 = tcg_temp_new_i64();                                                  \
    gen_addr_reg_index(ctx, EA);                                              \
    get_fpr(t0, rS(ctx->opcode));                                             \
    gen_qemu_##stop(ctx, t0, EA);                                             \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
    tcg_temp_free_i64(t0);                                                    \
}

#define GEN_STXF(name, stop, opc2, opc3, type)                                \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    TCGv_i64 t0;                                                              \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_FPU);                                 \
        return;                                                               \
    }                                                                         \
    gen_set_access_type(ctx, ACCESS_FLOAT);                                   \
    EA = tcg_temp_new();                                                      \
    t0 = tcg_temp_new_i64();                                                  \
    gen_addr_reg_index(ctx, EA);                                              \
    get_fpr(t0, rS(ctx->opcode));                                             \
    gen_qemu_##stop(ctx, t0, EA);                                             \
    tcg_temp_free(EA);                                                        \
    tcg_temp_free_i64(t0);                                                    \
}

#define GEN_STFS(name, stop, op, type)                                        \
GEN_STF(name, stop, op | 0x20, type);                                         \
GEN_STUF(name, stop, op | 0x21, type);                                        \
GEN_STUXF(name, stop, op | 0x01, type);                                       \
GEN_STXF(name, stop, 0x17, op | 0x00, type)
1155 static void gen_qemu_st32fs(DisasContext
*ctx
, TCGv_i64 src
, TCGv addr
)
1157 TCGv_i32 tmp
= tcg_temp_new_i32();
1158 gen_helper_tosingle(tmp
, src
);
1159 tcg_gen_qemu_st_i32(tmp
, addr
, ctx
->mem_idx
, DEF_MEMOP(MO_UL
));
1160 tcg_temp_free_i32(tmp
);
1163 /* stfd stfdu stfdux stfdx */
1164 GEN_STFS(stfd
, st64_i64
, 0x16, PPC_FLOAT
);
1165 /* stfs stfsu stfsux stfsx */
1166 GEN_STFS(stfs
, st32fs
, 0x14, PPC_FLOAT
);
1168 /* stfdepx (external PID lfdx) */
1169 static void gen_stfdepx(DisasContext
*ctx
)
1174 if (unlikely(!ctx
->fpu_enabled
)) {
1175 gen_exception(ctx
, POWERPC_EXCP_FPU
);
1178 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1179 EA
= tcg_temp_new();
1180 t0
= tcg_temp_new_i64();
1181 gen_addr_reg_index(ctx
, EA
);
1182 get_fpr(t0
, rD(ctx
->opcode
));
1183 tcg_gen_qemu_st_i64(t0
, EA
, PPC_TLB_EPID_STORE
, DEF_MEMOP(MO_Q
));
1185 tcg_temp_free_i64(t0
);
1189 static void gen_stfdp(DisasContext
*ctx
)
1193 if (unlikely(!ctx
->fpu_enabled
)) {
1194 gen_exception(ctx
, POWERPC_EXCP_FPU
);
1197 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1198 EA
= tcg_temp_new();
1199 t0
= tcg_temp_new_i64();
1200 gen_addr_imm_index(ctx
, EA
, 0);
1202 * We only need to swap high and low halves. gen_qemu_st64_i64
1203 * does necessary 64-bit byteswap already.
1205 if (unlikely(ctx
->le_mode
)) {
1206 get_fpr(t0
, rD(ctx
->opcode
) + 1);
1207 gen_qemu_st64_i64(ctx
, t0
, EA
);
1208 tcg_gen_addi_tl(EA
, EA
, 8);
1209 get_fpr(t0
, rD(ctx
->opcode
));
1210 gen_qemu_st64_i64(ctx
, t0
, EA
);
1212 get_fpr(t0
, rD(ctx
->opcode
));
1213 gen_qemu_st64_i64(ctx
, t0
, EA
);
1214 tcg_gen_addi_tl(EA
, EA
, 8);
1215 get_fpr(t0
, rD(ctx
->opcode
) + 1);
1216 gen_qemu_st64_i64(ctx
, t0
, EA
);
1219 tcg_temp_free_i64(t0
);
1223 static void gen_stfdpx(DisasContext
*ctx
)
1227 if (unlikely(!ctx
->fpu_enabled
)) {
1228 gen_exception(ctx
, POWERPC_EXCP_FPU
);
1231 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1232 EA
= tcg_temp_new();
1233 t0
= tcg_temp_new_i64();
1234 gen_addr_reg_index(ctx
, EA
);
1236 * We only need to swap high and low halves. gen_qemu_st64_i64
1237 * does necessary 64-bit byteswap already.
1239 if (unlikely(ctx
->le_mode
)) {
1240 get_fpr(t0
, rD(ctx
->opcode
) + 1);
1241 gen_qemu_st64_i64(ctx
, t0
, EA
);
1242 tcg_gen_addi_tl(EA
, EA
, 8);
1243 get_fpr(t0
, rD(ctx
->opcode
));
1244 gen_qemu_st64_i64(ctx
, t0
, EA
);
1246 get_fpr(t0
, rD(ctx
->opcode
));
1247 gen_qemu_st64_i64(ctx
, t0
, EA
);
1248 tcg_gen_addi_tl(EA
, EA
, 8);
1249 get_fpr(t0
, rD(ctx
->opcode
) + 1);
1250 gen_qemu_st64_i64(ctx
, t0
, EA
);
1253 tcg_temp_free_i64(t0
);
1257 static inline void gen_qemu_st32fiw(DisasContext
*ctx
, TCGv_i64 arg1
, TCGv arg2
)
1259 TCGv t0
= tcg_temp_new();
1260 tcg_gen_trunc_i64_tl(t0
, arg1
),
1261 gen_qemu_st32(ctx
, t0
, arg2
);
1265 GEN_STXF(stfiw
, st32fiw
, 0x17, 0x1E, PPC_FLOAT_STFIWX
);
1267 /* POWER2 specific instructions */
1268 /* Quad manipulation (load/store two floats at a time) */
1271 static void gen_lfq(DisasContext
*ctx
)
1273 int rd
= rD(ctx
->opcode
);
1276 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1277 t0
= tcg_temp_new();
1278 t1
= tcg_temp_new_i64();
1279 gen_addr_imm_index(ctx
, t0
, 0);
1280 gen_qemu_ld64_i64(ctx
, t1
, t0
);
1282 gen_addr_add(ctx
, t0
, t0
, 8);
1283 gen_qemu_ld64_i64(ctx
, t1
, t0
);
1284 set_fpr((rd
+ 1) % 32, t1
);
1286 tcg_temp_free_i64(t1
);
1290 static void gen_lfqu(DisasContext
*ctx
)
1292 int ra
= rA(ctx
->opcode
);
1293 int rd
= rD(ctx
->opcode
);
1296 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1297 t0
= tcg_temp_new();
1298 t1
= tcg_temp_new();
1299 t2
= tcg_temp_new_i64();
1300 gen_addr_imm_index(ctx
, t0
, 0);
1301 gen_qemu_ld64_i64(ctx
, t2
, t0
);
1303 gen_addr_add(ctx
, t1
, t0
, 8);
1304 gen_qemu_ld64_i64(ctx
, t2
, t1
);
1305 set_fpr((rd
+ 1) % 32, t2
);
1307 tcg_gen_mov_tl(cpu_gpr
[ra
], t0
);
1311 tcg_temp_free_i64(t2
);
1315 static void gen_lfqux(DisasContext
*ctx
)
1317 int ra
= rA(ctx
->opcode
);
1318 int rd
= rD(ctx
->opcode
);
1319 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1322 t2
= tcg_temp_new_i64();
1323 t0
= tcg_temp_new();
1324 gen_addr_reg_index(ctx
, t0
);
1325 gen_qemu_ld64_i64(ctx
, t2
, t0
);
1327 t1
= tcg_temp_new();
1328 gen_addr_add(ctx
, t1
, t0
, 8);
1329 gen_qemu_ld64_i64(ctx
, t2
, t1
);
1330 set_fpr((rd
+ 1) % 32, t2
);
1333 tcg_gen_mov_tl(cpu_gpr
[ra
], t0
);
1336 tcg_temp_free_i64(t2
);
1340 static void gen_lfqx(DisasContext
*ctx
)
1342 int rd
= rD(ctx
->opcode
);
1345 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1346 t0
= tcg_temp_new();
1347 t1
= tcg_temp_new_i64();
1348 gen_addr_reg_index(ctx
, t0
);
1349 gen_qemu_ld64_i64(ctx
, t1
, t0
);
1351 gen_addr_add(ctx
, t0
, t0
, 8);
1352 gen_qemu_ld64_i64(ctx
, t1
, t0
);
1353 set_fpr((rd
+ 1) % 32, t1
);
1355 tcg_temp_free_i64(t1
);
1359 static void gen_stfq(DisasContext
*ctx
)
1361 int rd
= rD(ctx
->opcode
);
1364 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1365 t0
= tcg_temp_new();
1366 t1
= tcg_temp_new_i64();
1367 gen_addr_imm_index(ctx
, t0
, 0);
1369 gen_qemu_st64_i64(ctx
, t1
, t0
);
1370 gen_addr_add(ctx
, t0
, t0
, 8);
1371 get_fpr(t1
, (rd
+ 1) % 32);
1372 gen_qemu_st64_i64(ctx
, t1
, t0
);
1374 tcg_temp_free_i64(t1
);
1378 static void gen_stfqu(DisasContext
*ctx
)
1380 int ra
= rA(ctx
->opcode
);
1381 int rd
= rD(ctx
->opcode
);
1384 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1385 t2
= tcg_temp_new_i64();
1386 t0
= tcg_temp_new();
1387 gen_addr_imm_index(ctx
, t0
, 0);
1389 gen_qemu_st64_i64(ctx
, t2
, t0
);
1390 t1
= tcg_temp_new();
1391 gen_addr_add(ctx
, t1
, t0
, 8);
1392 get_fpr(t2
, (rd
+ 1) % 32);
1393 gen_qemu_st64_i64(ctx
, t2
, t1
);
1396 tcg_gen_mov_tl(cpu_gpr
[ra
], t0
);
1399 tcg_temp_free_i64(t2
);
1403 static void gen_stfqux(DisasContext
*ctx
)
1405 int ra
= rA(ctx
->opcode
);
1406 int rd
= rD(ctx
->opcode
);
1409 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1410 t2
= tcg_temp_new_i64();
1411 t0
= tcg_temp_new();
1412 gen_addr_reg_index(ctx
, t0
);
1414 gen_qemu_st64_i64(ctx
, t2
, t0
);
1415 t1
= tcg_temp_new();
1416 gen_addr_add(ctx
, t1
, t0
, 8);
1417 get_fpr(t2
, (rd
+ 1) % 32);
1418 gen_qemu_st64_i64(ctx
, t2
, t1
);
1421 tcg_gen_mov_tl(cpu_gpr
[ra
], t0
);
1424 tcg_temp_free_i64(t2
);
1428 static void gen_stfqx(DisasContext
*ctx
)
1430 int rd
= rD(ctx
->opcode
);
1433 gen_set_access_type(ctx
, ACCESS_FLOAT
);
1434 t1
= tcg_temp_new_i64();
1435 t0
= tcg_temp_new();
1436 gen_addr_reg_index(ctx
, t0
);
1438 gen_qemu_st64_i64(ctx
, t1
, t0
);
1439 gen_addr_add(ctx
, t0
, t0
, 8);
1440 get_fpr(t1
, (rd
+ 1) % 32);
1441 gen_qemu_st64_i64(ctx
, t1
, t0
);
1443 tcg_temp_free_i64(t1
);
/* Tear down the instruction-generator macros; the missing GEN_FLOAT_AB
 * undef (dropped by extraction) is restored to pair with _GEN_FLOAT_AB. */
#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC