/*
 * translate-fp.c
 *
 * Standard FPU translation
 */

static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus(cpu_env);
}

static inline void gen_compute_fprf_float64(TCGv_i64 arg)
{
    gen_helper_compute_fprf_float64(cpu_env, arg);
    gen_helper_float_check_status(cpu_env);
}

#if defined(TARGET_PPC64)
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
    tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
    tcg_temp_free_i32(tmp);
}
#else
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
}
#endif

/*** Floating-Point arithmetic ***/
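
/*
 * Generator macros for the arithmetic handlers below.  Each expands to a
 * gen_f<name>() translator that checks that the FPU is enabled (raising
 * POWERPC_EXCP_FPU otherwise), loads the source FPRs, calls the matching
 * gen_helper_f<op>() and writes the result to FRT.  "isfloat" appends a
 * frsp round to single precision (the "s" forms), "set_fprf" updates
 * FPSCR[FPRF] via gen_compute_fprf_float64(), and a set Rc bit copies the
 * FPSCR summary bits into CR1.
 */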
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    TCGv_i64 t3; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    t3 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    get_fpr(t2, rB(ctx->opcode)); \
    gen_helper_f##op(t3, cpu_env, t0, t1, t2); \
    if (isfloat) { \
        gen_helper_frsp(t3, cpu_env, t3); \
    } \
    set_fpr(rD(ctx->opcode), t3); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t3); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
    tcg_temp_free_i64(t3); \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rB(ctx->opcode)); \
    gen_helper_f##op(t2, cpu_env, t0, t1); \
    if (isfloat) { \
        gen_helper_frsp(t2, cpu_env, t2); \
    } \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}

#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rA(ctx->opcode)); \
    get_fpr(t1, rC(ctx->opcode)); \
    gen_helper_f##op(t2, cpu_env, t0, t1); \
    if (isfloat) { \
        gen_helper_frsp(t2, cpu_env, t2); \
    } \
    set_fpr(rD(ctx->opcode), t2); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t2); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
}

#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t1); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}

#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    gen_reset_fpstatus(); \
    get_fpr(t0, rB(ctx->opcode)); \
    gen_helper_f##name(t1, cpu_env, t0); \
    set_fpr(rD(ctx->opcode), t1); \
    if (set_fprf) { \
        gen_compute_fprf_float64(t1); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
}
/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);

/* frsqrtes */
static void gen_frsqrtes(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_frsqrte(t1, cpu_env, t0);
    gen_helper_frsp(t1, cpu_env, t1);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fsel */
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* Optional: */

/* fsqrt */
static void gen_fsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrt(t1, cpu_env, t0);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_fsqrts(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_fsqrt(t1, cpu_env, t0);
    gen_helper_frsp(t1, cpu_env, t1);
    set_fpr(rD(ctx->opcode), t1);
    gen_compute_fprf_float64(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
/* fctiw */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwu */
GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
/* fctiwz */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* fctiwuz */
GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
/* frsp */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
/* fcfid */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
/* fcfids */
GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
/* fcfidu */
GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fcfidus */
GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fctid */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
/* fctidu */
GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* fctidz */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
/* fctiduz */
GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);

/* frin */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
static void gen_ftdiv(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_ftsqrt(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

/*** Floating-Point compare ***/

/* fcmpo */
static void gen_fcmpo(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpo(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcmpu */
static void gen_fcmpu(DisasContext *ctx)
{
    TCGv_i32 crf;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_fcmpu(cpu_env, t0, t1, crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor updates the FPSCR */
static void gen_fabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_andi_i64(t1, t0, ~(1ULL << 63));
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor updates the FPSCR */
static void gen_fmr(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor updates the FPSCR */
static void gen_fnabs(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_ori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fneg */
/* XXX: beware that fneg never checks for NaNs nor updates the FPSCR */
static void gen_fneg(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_xori_i64(t1, t0, 1ULL << 63);
    set_fpr(rD(ctx->opcode), t1);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/* fcpsgn: PowerPC 2.05 specification */
/* XXX: beware that fcpsgn never checks for NaNs nor updates the FPSCR */
static void gen_fcpsgn(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rA(ctx->opcode));
    get_fpr(t1, rB(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 0, 63);
    set_fpr(rD(ctx->opcode), t2);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

static void gen_fmrgew(DisasContext *ctx)
{
    TCGv_i64 b0;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    b0 = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    tcg_gen_shri_i64(b0, t0, 32);
    get_fpr(t0, rA(ctx->opcode));
    tcg_gen_deposit_i64(t1, t0, b0, 0, 32);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free_i64(b0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_fmrgow(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i64 t1;
    TCGv_i64 t2;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    t2 = tcg_temp_new_i64();
    get_fpr(t0, rB(ctx->opcode));
    get_fpr(t1, rA(ctx->opcode));
    tcg_gen_deposit_i64(t2, t0, t1, 32, 32);
    set_fpr(rD(ctx->opcode), t2);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
/*** Floating-Point status & ctrl register ***/

/* mcrfs */
static void gen_mcrfs(DisasContext *ctx)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmask;
    TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
    int bfa;
    int nibble;
    int shift;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bfa = crfS(ctx->opcode);
    nibble = 7 - bfa;
    shift = 4 * nibble;
    tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],
                     0xf);
    tcg_temp_free(tmp);
    tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
    /* Only the exception bits (including FX) should be cleared if read */
    tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr,
                     ~((0xF << shift) & FP_EX_CLEAR_BITS));
    /* FEX and VX need to be updated, so don't set fpscr directly */
    tmask = tcg_const_i32(1 << nibble);
    gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
    tcg_temp_free_i32(tmask);
    tcg_temp_free_i64(tnew_fpscr);
}

/* mffs */
static void gen_mffs(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    set_fpr(rD(ctx->opcode), t0);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
    tcg_temp_free_i64(t0);
}

/* mffsl */
static void gen_mffsl(DisasContext *ctx)
{
    TCGv_i64 t0;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    /* Mask everything except mode, status, and enables. */
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_STATUS | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

/* mffsce */
static void gen_mffsce(DisasContext *ctx)
{
    TCGv_i64 t0;
    TCGv_i32 mask;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t0 = tcg_temp_new_i64();

    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    set_fpr(rD(ctx->opcode), t0);

    /* Clear exception enable bits in the FPSCR. */
    tcg_gen_andi_i64(t0, t0, ~FP_ENABLES);
    mask = tcg_const_i32(0x0003);
    gen_helper_store_fpscr(cpu_env, t0, mask);

    tcg_temp_free_i32(mask);
    tcg_temp_free_i64(t0);
}
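
/*
 * Shared tail for mffscrn and mffscrni: t1 already holds the new RN value
 * in the low FPSCR bits.  Despite the gen_helper_ prefix this is a local
 * translator routine, not a TCG runtime helper.
 */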
static void gen_helper_mffscrn(DisasContext *ctx, TCGv_i64 t1)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i32 mask = tcg_const_i32(0x0001);

    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(t0, cpu_fpscr);
    tcg_gen_andi_i64(t0, t0, FP_DRN | FP_ENABLES | FP_RN);
    set_fpr(rD(ctx->opcode), t0);

    /* Mask FPSCR value to clear RN. */
    tcg_gen_andi_i64(t0, t0, ~FP_RN);

    /* Merge RN into FPSCR value. */
    tcg_gen_or_i64(t0, t0, t1);

    gen_helper_store_fpscr(cpu_env, t0, mask);

    tcg_temp_free_i32(mask);
    tcg_temp_free_i64(t0);
}

/* mffscrn */
static void gen_mffscrn(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    /* Mask FRB to get just RN. */
    tcg_gen_andi_i64(t1, t1, FP_RN);

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mffscrni */
static void gen_mffscrni(DisasContext *ctx)
{
    TCGv_i64 t1;

    if (unlikely(!(ctx->insns_flags2 & PPC2_ISA300))) {
        return gen_mffs(ctx);
    }

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }

    t1 = tcg_const_i64((uint64_t)RM(ctx->opcode));

    gen_helper_mffscrn(ctx, t1);

    tcg_temp_free_i64(t1);
}

/* mtfsb0 */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}

/* mtfsb1 */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}

/* mtfsf */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    TCGv_i64 t1;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    gen_reset_fpstatus();
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    t1 = tcg_temp_new_i64();
    get_fpr(t1, rB(ctx->opcode));
    gen_helper_store_fpscr(cpu_env, t1, t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
    tcg_temp_free_i64(t1);
}

/* mtfsfi */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    gen_reset_fpstatus();
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}

/*** Floating-point load ***/
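
/*
 * Each GEN_LDFS user below expands to four handlers: the D-form load
 * (GEN_LDF), its update form (GEN_LDUF), and the X-form indexed load with
 * and without update (GEN_LDXF / GEN_LDUXF).  The update forms write the
 * computed effective address back into rA and treat rA == 0 as an invalid
 * form.
 */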
#define GEN_LDF(name, ldop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDUF(name, ldop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDUXF(name, ldop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDXF(name, ldop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, t0, EA); \
    set_fpr(rD(ctx->opcode), t0); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_LDFS(name, ldop, op, type) \
GEN_LDF(name, ldop, op | 0x20, type); \
GEN_LDUF(name, ldop, op | 0x21, type); \
GEN_LDUXF(name, ldop, op | 0x01, type); \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)

static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    gen_helper_todouble(dest, tmp);
    tcg_temp_free_i32(tmp);
}

/* lfd lfdu lfdux lfdx */
GEN_LDFS(lfd, ld64_i64, 0x12, PPC_FLOAT);
/* lfs lfsu lfsux lfsx */
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);

/* lfdepx (external PID lfdx) */
static void gen_lfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(t0, EA, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_Q));
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdp */
static void gen_lfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfdpx */
static void gen_lfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    t0 = tcg_temp_new_i64();
    /*
     * We only need to swap high and low halves. gen_qemu_ld64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
    } else {
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode), t0);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, t0, EA);
        set_fpr(rD(ctx->opcode) + 1, t0);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* lfiwax */
static void gen_lfiwax(DisasContext *ctx)
{
    TCGv EA;
    TCGv t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32s(ctx, t0, EA);
    tcg_gen_ext_tl_i64(t1, t0);
    set_fpr(rD(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfiwzx */
static void gen_lfiwzx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, t0, EA);
    set_fpr(rD(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/*** Floating-point store ***/
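
/*
 * The store generators below mirror the load macros above, but read the
 * value to store from FPR rS instead of writing FPR rD.
 */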
#define GEN_STF(name, stop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STUF(name, stop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_imm_index(ctx, EA, 0); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STUXF(name, stop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STXF(name, stop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 t0; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    t0 = tcg_temp_new_i64(); \
    gen_addr_reg_index(ctx, EA); \
    get_fpr(t0, rS(ctx->opcode)); \
    gen_qemu_##stop(ctx, t0, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(t0); \
}

#define GEN_STFS(name, stop, op, type) \
GEN_STF(name, stop, op | 0x20, type); \
GEN_STUF(name, stop, op | 0x21, type); \
GEN_STUXF(name, stop, op | 0x01, type); \
GEN_STXF(name, stop, 0x17, op | 0x00, type)

static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    gen_helper_tosingle(tmp, src);
    tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_temp_free_i32(tmp);
}

/* stfd stfdu stfdux stfdx */
GEN_STFS(stfd, st64_i64, 0x16, PPC_FLOAT);
/* stfs stfsu stfsux stfsx */
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);

/* stfdepx (external PID stfdx) */
static void gen_stfdepx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    get_fpr(t0, rD(ctx->opcode));
    tcg_gen_qemu_st_i64(t0, EA, PPC_TLB_EPID_STORE, DEF_MEMOP(MO_Q));
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdp */
static void gen_stfdp(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, EA, 0);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* stfdpx */
static void gen_stfdpx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, EA);
    /*
     * We only need to swap high and low halves. gen_qemu_st64_i64
     * does necessary 64-bit byteswap already.
     */
    if (unlikely(ctx->le_mode)) {
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
    } else {
        get_fpr(t0, rD(ctx->opcode));
        gen_qemu_st64_i64(ctx, t0, EA);
        tcg_gen_addi_tl(EA, EA, 8);
        get_fpr(t0, rD(ctx->opcode) + 1);
        gen_qemu_st64_i64(ctx, t0, EA);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

/* Optional: */
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_trunc_i64_tl(t0, arg1);
    gen_qemu_st32(ctx, t0, arg2);
    tcg_temp_free(t0);
}

/* stfiwx */
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);

/* POWER2 specific instructions */
/* Quad manipulation (load/store two floats at a time) */
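
/*
 * Each of these moves a pair of FPRs, FRT and (FRT + 1) % 32, to or from
 * two consecutive doublewords in memory.
 */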
/* lfq */
static void gen_lfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr(rd, t1);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr((rd + 1) % 32, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* lfqu */
static void gen_lfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    t2 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, t2, t0);
    set_fpr(rd, t2);
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, t2, t1);
    set_fpr((rd + 1) % 32, t2);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i64(t2);
}

/* lfqux */
static void gen_lfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    gen_set_access_type(ctx, ACCESS_FLOAT);
    TCGv t0, t1;
    TCGv_i64 t2;
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, t2, t0);
    set_fpr(rd, t2);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, t2, t1);
    set_fpr((rd + 1) % 32, t2);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* lfqx */
static void gen_lfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr(rd, t1);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, t1, t0);
    set_fpr((rd + 1) % 32, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* stfq */
static void gen_stfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new_i64();
    gen_addr_imm_index(ctx, t0, 0);
    get_fpr(t1, rd);
    gen_qemu_st64_i64(ctx, t1, t0);
    gen_addr_add(ctx, t0, t0, 8);
    get_fpr(t1, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

/* stfqu */
static void gen_stfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    get_fpr(t2, rd);
    gen_qemu_st64_i64(ctx, t2, t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    get_fpr(t2, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t2, t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* stfqux */
static void gen_stfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    TCGv_i64 t2;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t2 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    get_fpr(t2, rd);
    gen_qemu_st64_i64(ctx, t2, t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    get_fpr(t2, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t2, t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free_i64(t2);
}

/* stfqx */
static void gen_stfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    TCGv_i64 t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t1 = tcg_temp_new_i64();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    get_fpr(t1, rd);
    gen_qemu_st64_i64(ctx, t1, t0);
    gen_addr_add(ctx, t0, t0, 8);
    get_fpr(t1, (rd + 1) % 32);
    gen_qemu_st64_i64(ctx, t1, t0);
    tcg_temp_free(t0);
    tcg_temp_free_i64(t1);
}

#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_B
#undef GEN_FLOAT_BS

#undef GEN_LDF
#undef GEN_LDUF
#undef GEN_LDUXF
#undef GEN_LDXF
#undef GEN_LDFS

#undef GEN_STF
#undef GEN_STUF
#undef GEN_STUXF
#undef GEN_STXF
#undef GEN_STFS