ppc: Add support for 'mffscrn','mffscrni' instructions
target/ppc/translate/fp-impl.inc.c
/*
 * translate-fp.c
 *
 * Standard FPU translation
 */

static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus(cpu_env);
}

static inline void gen_compute_fprf_float64(TCGv_i64 arg)
{
    gen_helper_compute_fprf_float64(cpu_env, arg);
    gen_helper_float_check_status(cpu_env);
}

#if defined(TARGET_PPC64)
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
    tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
    tcg_temp_free_i32(tmp);
}
#else
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
}
#endif
/*** Floating-Point arithmetic ***/
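/*
 * The _GEN_FLOAT_* macros below each expand into one translation routine.
 * ACB forms read three source FPRs (FRA, FRC, FRB), AB/AC forms read two,
 * and B/BS forms read one; the "isfloat" argument selects the
 * single-precision variant, which rounds the double-precision result
 * through gen_helper_frsp before writing FRT.
 */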
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    TCGv_i64 t3; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    t3 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    get_fpr(t2, rB(ctx->opcode)); \
    gen_helper_f##op(t3, cpu_env, t0, t1, t2); \
    if (isfloat) { \
        gen_helper_frsp(t3, cpu_env, t3); \
    } \
    set_fpr(rD(ctx->opcode), t3); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t3); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
    tcg_temp_free_i64(t3); \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rB(ctx->opcode)); \
    gen_helper_f##op(t2, cpu_env, t0, t1); \
    if (isfloat) { \
        gen_helper_frsp(t2, cpu_env, t2); \
    } \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}

#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    gen_helper_f##op(t2, cpu_env, t0, t1); \
    if (isfloat) { \
        gen_helper_frsp(t2, cpu_env, t2); \
    } \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}

#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t1); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t1); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}
/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);

/* frsqrtes */
static void gen_frsqrtes(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_frsqrte(t1, cpu_env, t0);
    gen_helper_frsp(t1, cpu_env, t1);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fsel */
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);

/* Optional: */
/* fsqrt */
static void gen_fsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrt(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_fsqrts(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrt(t1, cpu_env, t0);
    gen_helper_frsp(t1, cpu_env, t1);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
/* fctiw */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwu */
GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
/* fctiwz */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* fctiwuz */
GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
/* frsp */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
/* fcfid */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
/* fcfids */
GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
/* fcfidu */
GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fcfidus */
GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fctid */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
/* fctidu */
GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* fctidz */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
/* fctiduz */
GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);

/* frin */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
static void gen_ftdiv(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_ftsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}
/*** Floating-Point compare ***/

/* fcmpo */
static void gen_fcmpo(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpo(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcmpu */
static void gen_fcmpu(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpu(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor updates the FPSCR */
static void gen_fabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_andi_i64(t1, t0, ~(1ULL << 63));
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor updates the FPSCR */
static void gen_fmr(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor updates the FPSCR */
static void gen_fnabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_ori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fneg */
/* XXX: beware that fneg never checks for NaNs nor updates the FPSCR */
static void gen_fneg(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_xori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcpsgn: PowerPC 2.05 specification */
/* XXX: beware that fcpsgn never checks for NaNs nor updates the FPSCR */
static void gen_fcpsgn(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 0, 63);
    set_fpr(rD(ctx->opcode), t2);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

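/*
 * fmrgew/fmrgow merge the even (high) or odd (low) 32-bit words of FRA
 * and FRB into FRT (presumably the ISA 2.07 forms); like the moves above
 * they are implemented with plain shift/deposit ops and never touch the
 * FPSCR.
 */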
static void gen_fmrgew(DisasContext *ctx)
{
    TCGv_i64 b0;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    b0 = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_shri_i64(b0, t0, 32);
    get_fpr(t0, rA(ctx->opcode));
    tcg_gen_deposit_i64(t1, t0, b0, 0, 32);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free_i64(b0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_fmrgow(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    get_fpr(t1, rA(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 32, 32);
    set_fpr(rD(ctx->opcode), t2);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

/*** Floating-Point status & ctrl register ***/
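/*
 * FPSCR updates in this section go through gen_helper_store_fpscr() with a
 * nibble mask instead of writing cpu_fpscr directly, so the helper can
 * recompute the derived FEX and VX summary bits.
 */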
/* mcrfs */
static void gen_mcrfs(DisasContext *ctx)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmask;
    TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
    int bfa;
    int nibble;
    int shift;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bfa = crfS(ctx->opcode);
    nibble = 7 - bfa;
    shift = 4 * nibble;
    tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],
                     0xf);
    tcg_temp_free(tmp);
    tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
    /* Only the exception bits (including FX) should be cleared if read */
    tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
                     ~((0xF << shift) & FP_EX_CLEAR_BITS));
    /* FEX and VX need to be updated, so don't set fpscr directly */
    tmask = tcg_const_i32(1 << nibble);
    gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
    tcg_temp_free_i32(tmask);
    tcg_temp_free_i64(tnew_fpscr);
}

/* mffs */
static void gen_mffs(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* mffsl */
static void gen_mffsl(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    /* Mask everything except mode, status, and enables. */
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_STATUS | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

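/*
 * Common tail for mffscrn/mffscrni: t1 holds the new rounding mode already
 * positioned in the FPSCR RN field.  The current DRN/enable/RN control bits
 * are copied to FRT, then RN is replaced through gen_helper_store_fpscr()
 * with a mask selecting only the lowest FPSCR nibble.
 */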
static void gen_helper_mffscrn(DisasContext *ctx, TCGv_i64 t1)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i32 mask = tcg_const_i32(0x0001);

    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);

    /* Mask FPSCR value to clear RN. */
    tcg_gen_andi_i64(t0, t0, ~FP_RN);

    /* Merge RN into FPSCR value. */
    tcg_gen_or_i64(t0, t0, t1);

    gen_helper_store_fpscr(cpu_env, t0, mask);

    tcg_temp_free_i32(mask);
    tcg_temp_free_i64(t0);
}

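/*
 * mffscrn takes the new rounding mode from FRB (only its FP_RN bits are
 * kept); mffscrni takes it from an immediate field decoded with RM().
 * Both return the previous control bits in FRT via the helper above.
 */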
/* mffscrn */
static void gen_mffscrn(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    /* Mask FRB to get just RN. */
    tcg_gen_andi_i64(t1, t1, FP_RN);

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mffscrni */
static void gen_mffscrni(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_const_i64((uint64_t)RM(ctx->opcode));

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mtfsb0 */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}

/* mtfsb1 */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}

/* mtfsf */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    TCGv_i64 t1;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    gen_reset_fpstatus();
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_store_fpscr(cpu_env, t1, t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t1);
}

/* mtfsfi */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    gen_reset_fpstatus();
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}

/*** Floating-point load ***/
#define GEN_LDF(name, ldop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDUF(name, ldop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDUXF(name, ldop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDXF(name, ldop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDFS(name, ldop, op, type) \
GEN_LDF(name, ldop, op | 0x20, type); \
GEN_LDUF(name, ldop, op | 0x21, type); \
GEN_LDUXF(name, ldop, op | 0x01, type); \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)

static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    gen_helper_todouble(dest, tmp);
    tcg_temp_free_i32(tmp);
}

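/*
 * Single-precision values live in the FPRs in double format, so the 32-bit
 * load converts with gen_helper_todouble above and the matching store path
 * (gen_qemu_st32fs below) converts back with gen_helper_tosingle.
 */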
/* lfd lfdu lfdux lfdx */
GEN_LDFS(lfd, ld64_i64, 0x12, PPC_FLOAT);
/* lfs lfsu lfsux lfsx */
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);

/* lfdepx (external PID lfdx) */
static void gen_lfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(t0, EA, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_Q));
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdp */
static void gen_lfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdpx */
static void gen_lfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfiwax */
static void gen_lfiwax(DisasContext *ctx)
{
    TCGv EA;
    TCGv t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32s(ctx, t0, EA);
    tcg_gen_ext_tl_i64(t1, t0);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfiwzx */
static void gen_lfiwzx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, t0, EA);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/*** Floating-point store ***/
#define GEN_STF(name, stop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STUF(name, stop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STUXF(name, stop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STXF(name, stop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STFS(name, stop, op, type) \
GEN_STF(name, stop, op | 0x20, type); \
GEN_STUF(name, stop, op | 0x21, type); \
GEN_STUXF(name, stop, op | 0x01, type); \
GEN_STXF(name, stop, 0x17, op | 0x00, type)

static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    gen_helper_tosingle(tmp, src);
    tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_temp_free_i32(tmp);
}

/* stfd stfdu stfdux stfdx */
GEN_STFS(stfd, st64_i64, 0x16, PPC_FLOAT);
/* stfs stfsu stfsux stfsx */
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);

/* stfdepx (external PID stfdx) */
static void gen_stfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    get_fpr(t0, rD(ctx->opcode));
    tcg_gen_qemu_st_i64(t0, EA, PPC_TLB_EPID_STORE, DEF_MEMOP(MO_Q));
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdp */
static void gen_stfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, EA, 0);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdpx */
static void gen_stfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* Optional: */
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_trunc_i64_tl(t0, arg1);
    gen_qemu_st32(ctx, t0, arg2);
    tcg_temp_free(t0);
}
/* stfiwx */
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);

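/*
 * stfiwx stores the low 32 bits of the FPR image as-is, with no
 * single-precision conversion, hence the trunc/store sequence above
 * rather than gen_helper_tosingle.
 */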
/* POWER2 specific instructions */
/* Quad manipulation (load/store two floats at a time) */

/* lfq */
static void gen_lfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr(rd, t1);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr((rd + 1) % 32, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfqu */
static void gen_lfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, t2, t0);
    set_fpr(rd, t2);
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, t2, t1);
    set_fpr((rd + 1) % 32, t2);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i64(t2);
}

/* lfqux */
static void gen_lfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    gen_set_access_type(ctx, ACCESS_FLOAT);
    TCGv t0, t1;
    TCGv_i64 t2;
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, t2, t0);
    set_fpr(rd, t2);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, t2, t1);
    set_fpr((rd + 1) % 32, t2);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* lfqx */
static void gen_lfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr(rd, t1);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr((rd + 1) % 32, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* stfq */
static void gen_stfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    get_fpr(t1, rd);
    gen_qemu_st64_i64(ctx, t1, t0);
    gen_addr_add(ctx, t0, t0, 8);
    get_fpr(t1, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* stfqu */
static void gen_stfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    get_fpr(t2, rd);
    gen_qemu_st64_i64(ctx, t2, t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    get_fpr(t2, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t2, t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* stfqux */
static void gen_stfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    get_fpr(t2, rd);
    gen_qemu_st64_i64(ctx, t2, t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    get_fpr(t2, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t2, t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* stfqx */
static void gen_stfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t1 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    get_fpr(t1, rd);
    gen_qemu_st64_i64(ctx, t1, t0);
    gen_addr_add(ctx, t0, t0, 8);
    get_fpr(t1, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_B
#undef GEN_FLOAT_BS

#undef GEN_LDF
#undef GEN_LDUF
#undef GEN_LDUXF
#undef GEN_LDXF
#undef GEN_LDFS

#undef GEN_STF
#undef GEN_STUF
#undef GEN_STUXF
#undef GEN_STXF
#undef GEN_STFS