target/ppc: add external PID support
[qemu/ar7.git] / target/ppc/translate/fp-impl.inc.c
blob 08770ba9f529c8bd31c3e1565491dbd15df90a94
/*
 * translate-fp.c
 *
 * Standard FPU translation
 */

static inline void gen_reset_fpstatus(void)
{
    gen_helper_reset_fpstatus(cpu_env);
}

static inline void gen_compute_fprf_float64(TCGv_i64 arg)
{
    gen_helper_compute_fprf_float64(cpu_env, arg);
    gen_helper_float_check_status(cpu_env);
}

#if defined(TARGET_PPC64)
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(tmp, cpu_fpscr);
    tcg_gen_shri_i32(cpu_crf[1], tmp, 28);
    tcg_temp_free_i32(tmp);
}
#else
static void gen_set_cr1_from_fpscr(DisasContext *ctx)
{
    tcg_gen_shri_tl(cpu_crf[1], cpu_fpscr, 28);
}
#endif
/*** Floating-Point arithmetic ***/
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_reset_fpstatus(); \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                     cpu_fpr[rA(ctx->opcode)], \
                     cpu_fpr[rC(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]); \
    if (isfloat) { \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                        cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (set_fprf) { \
        gen_compute_fprf_float64(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}

#define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_reset_fpstatus(); \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                     cpu_fpr[rA(ctx->opcode)], \
                     cpu_fpr[rB(ctx->opcode)]); \
    if (isfloat) { \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                        cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (set_fprf) { \
        gen_compute_fprf_float64(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}

#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_reset_fpstatus(); \
    gen_helper_f##op(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                     cpu_fpr[rA(ctx->opcode)], \
                     cpu_fpr[rC(ctx->opcode)]); \
    if (isfloat) { \
        gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                        cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (set_fprf) { \
        gen_compute_fprf_float64(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}

#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

#define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_reset_fpstatus(); \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                       cpu_fpr[rB(ctx->opcode)]); \
    if (set_fprf) { \
        gen_compute_fprf_float64(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}

#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
static void gen_f##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_reset_fpstatus(); \
    gen_helper_f##name(cpu_fpr[rD(ctx->opcode)], cpu_env, \
                       cpu_fpr[rB(ctx->opcode)]); \
    if (set_fprf) { \
        gen_compute_fprf_float64(cpu_fpr[rD(ctx->opcode)]); \
    } \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_cr1_from_fpscr(ctx); \
    } \
}
/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);

/* frsqrtes */
static void gen_frsqrtes(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_reset_fpstatus();
    gen_helper_frsqrte(cpu_fpr[rD(ctx->opcode)], cpu_env,
                       cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
                    cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf_float64(cpu_fpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

/* fsel */
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* Optional: */

/* fsqrt */
static void gen_fsqrt(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
                     cpu_fpr[rB(ctx->opcode)]);
    gen_compute_fprf_float64(cpu_fpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

static void gen_fsqrts(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_reset_fpstatus();
    gen_helper_fsqrt(cpu_fpr[rD(ctx->opcode)], cpu_env,
                     cpu_fpr[rB(ctx->opcode)]);
    gen_helper_frsp(cpu_fpr[rD(ctx->opcode)], cpu_env,
                    cpu_fpr[rD(ctx->opcode)]);
    gen_compute_fprf_float64(cpu_fpr[rD(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_cr1_from_fpscr(ctx);
    }
}
/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
/* fctiw */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwu */
GEN_FLOAT_B(ctiwu, 0x0E, 0x04, 0, PPC2_FP_CVT_ISA206);
/* fctiwz */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* fctiwuz */
GEN_FLOAT_B(ctiwuz, 0x0F, 0x04, 0, PPC2_FP_CVT_ISA206);
/* frsp */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
/* fcfid */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC2_FP_CVT_S64);
/* fcfids */
GEN_FLOAT_B(cfids, 0x0E, 0x1A, 0, PPC2_FP_CVT_ISA206);
/* fcfidu */
GEN_FLOAT_B(cfidu, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fcfidus */
GEN_FLOAT_B(cfidus, 0x0E, 0x1E, 0, PPC2_FP_CVT_ISA206);
/* fctid */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC2_FP_CVT_S64);
/* fctidu */
GEN_FLOAT_B(ctidu, 0x0E, 0x1D, 0, PPC2_FP_CVT_ISA206);
/* fctidz */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC2_FP_CVT_S64);
/* fctiduz */
GEN_FLOAT_B(ctiduz, 0x0F, 0x1D, 0, PPC2_FP_CVT_ISA206);

/* frin */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
static void gen_ftdiv(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_helper_ftdiv(cpu_crf[crfD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
                     cpu_fpr[rB(ctx->opcode)]);
}

static void gen_ftsqrt(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_helper_ftsqrt(cpu_crf[crfD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
}

/*** Floating-Point compare ***/

/* fcmpo */
static void gen_fcmpo(DisasContext *ctx)
{
    TCGv_i32 crf;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    gen_helper_fcmpo(cpu_env, cpu_fpr[rA(ctx->opcode)],
                     cpu_fpr[rB(ctx->opcode)], crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
}

/* fcmpu */
static void gen_fcmpu(DisasContext *ctx)
{
    TCGv_i32 crf;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_reset_fpstatus();
    crf = tcg_const_i32(crfD(ctx->opcode));
    gen_helper_fcmpu(cpu_env, cpu_fpr[rA(ctx->opcode)],
                     cpu_fpr[rB(ctx->opcode)], crf);
    tcg_temp_free_i32(crf);
    gen_helper_float_check_status(cpu_env);
}
/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor updates FPSCR */
static void gen_fabs(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_andi_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
                     ~(1ULL << 63));
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor updates FPSCR */
static void gen_fmr(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor updates FPSCR */
static void gen_fnabs(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_ori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
                    1ULL << 63);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

/* fneg */
/* XXX: beware that fneg never checks for NaNs nor updates FPSCR */
static void gen_fneg(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_xori_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rB(ctx->opcode)],
                     1ULL << 63);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

/* fcpsgn: PowerPC 2.05 specification */
/* XXX: beware that fcpsgn never checks for NaNs nor updates FPSCR */
static void gen_fcpsgn(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
                        cpu_fpr[rB(ctx->opcode)], 0, 63);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}

static void gen_fmrgew(DisasContext *ctx)
{
    TCGv_i64 b0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    b0 = tcg_temp_new_i64();
    tcg_gen_shri_i64(b0, cpu_fpr[rB(ctx->opcode)], 32);
    tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpr[rA(ctx->opcode)],
                        b0, 0, 32);
    tcg_temp_free_i64(b0);
}

static void gen_fmrgow(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    tcg_gen_deposit_i64(cpu_fpr[rD(ctx->opcode)],
                        cpu_fpr[rB(ctx->opcode)],
                        cpu_fpr[rA(ctx->opcode)],
                        32, 32);
}
/*** Floating-Point status & ctrl register ***/

/* mcrfs */
static void gen_mcrfs(DisasContext *ctx)
{
    TCGv tmp = tcg_temp_new();
    TCGv_i32 tmask;
    TCGv_i64 tnew_fpscr = tcg_temp_new_i64();
    int bfa;
    int nibble;
    int shift;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    bfa = crfS(ctx->opcode);
    nibble = 7 - bfa;
    shift = 4 * nibble;
    tcg_gen_shri_tl(tmp, cpu_fpscr, shift);
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], tmp);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
    tcg_temp_free(tmp);
    tcg_gen_extu_tl_i64(tnew_fpscr, cpu_fpscr);
    /* Only the exception bits (including FX) should be cleared if read */
    tcg_gen_andi_i64(tnew_fpscr, tnew_fpscr, ~((0xF << shift) & FP_EX_CLEAR_BITS));
    /* FEX and VX need to be updated, so don't set fpscr directly */
    tmask = tcg_const_i32(1 << nibble);
    gen_helper_store_fpscr(cpu_env, tnew_fpscr, tmask);
    tcg_temp_free_i32(tmask);
    tcg_temp_free_i64(tnew_fpscr);
}

/* mffs */
static void gen_mffs(DisasContext *ctx)
{
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_reset_fpstatus();
    tcg_gen_extu_tl_i64(cpu_fpr[rD(ctx->opcode)], cpu_fpscr);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_cr1_from_fpscr(ctx);
    }
}
/* mtfsb0 */
static void gen_mtfsb0(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_clrbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
}

/* mtfsb1 */
static void gen_mtfsb1(DisasContext *ctx)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    crb = 31 - crbD(ctx->opcode);
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI)) {
        TCGv_i32 t0;
        t0 = tcg_const_i32(crb);
        gen_helper_fpscr_setbit(cpu_env, t0);
        tcg_temp_free_i32(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}

/* mtfsf */
static void gen_mtfsf(DisasContext *ctx)
{
    TCGv_i32 t0;
    int flm, l, w;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    flm = FPFLM(ctx->opcode);
    l = FPL(ctx->opcode);
    w = FPW(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    gen_reset_fpstatus();
    if (l) {
        t0 = tcg_const_i32((ctx->insns_flags2 & PPC2_ISA205) ? 0xffff : 0xff);
    } else {
        t0 = tcg_const_i32(flm << (w * 8));
    }
    gen_helper_store_fpscr(cpu_env, cpu_fpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}

/* mtfsfi */
static void gen_mtfsfi(DisasContext *ctx)
{
    int bf, sh, w;
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    w = FPW(ctx->opcode);
    bf = FPBF(ctx->opcode);
    if (unlikely(w & !(ctx->insns_flags2 & PPC2_ISA205))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    sh = (8 * w) + 7 - bf;
    gen_reset_fpstatus();
    t0 = tcg_const_i64(((uint64_t)FPIMM(ctx->opcode)) << (4 * sh));
    t1 = tcg_const_i32(1 << sh);
    gen_helper_store_fpscr(cpu_env, t0, t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_fpscr);
        tcg_gen_shri_i32(cpu_crf[1], cpu_crf[1], FPSCR_OX);
    }
    /* We can raise a deferred exception */
    gen_helper_float_check_status(cpu_env);
}
/*** Floating-point load ***/
#define GEN_LDF(name, ldop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDUF(name, ldop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDUXF(name, ldop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDXF(name, ldop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##ldop(ctx, cpu_fpr[rD(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_LDFS(name, ldop, op, type) \
GEN_LDF(name, ldop, op | 0x20, type); \
GEN_LDUF(name, ldop, op | 0x21, type); \
GEN_LDUXF(name, ldop, op | 0x01, type); \
GEN_LDXF(name, ldop, 0x17, op | 0x00, type)

static void gen_qemu_ld32fs(DisasContext *ctx, TCGv_i64 dest, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    gen_helper_todouble(dest, tmp);
    tcg_temp_free_i32(tmp);
}

/* lfd lfdu lfdux lfdx */
GEN_LDFS(lfd, ld64_i64, 0x12, PPC_FLOAT);
/* lfs lfsu lfsux lfsx */
GEN_LDFS(lfs, ld32fs, 0x10, PPC_FLOAT);
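
/*
 * Note on the external PID form below (see the commit subject above):
 * lfdepx is the external-PID variant of lfdx. Instead of translating the
 * access with ctx->mem_idx, it uses the dedicated PPC_TLB_EPID_LOAD MMU
 * index, so the load goes through the external process ID translation
 * context rather than the current one, and the instruction is privileged
 * (CHK_SV).
 */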
/* lfdepx (external PID lfdx) */
static void gen_lfdepx(DisasContext *ctx)
{
    TCGv EA;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(cpu_fpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD,
                        DEF_MEMOP(MO_Q));
    tcg_temp_free(EA);
}
/* lfdp */
static void gen_lfdp(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    /* We only need to swap high and low halves. gen_qemu_ld64_i64 does
       the necessary 64-bit byteswap already. */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_ld64_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}

/* lfdpx */
static void gen_lfdpx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    /* We only need to swap high and low halves. gen_qemu_ld64_i64 does
       the necessary 64-bit byteswap already. */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_ld64_i64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_ld64_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_ld64_i64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}

/* lfiwax */
static void gen_lfiwax(DisasContext *ctx)
{
    TCGv EA;
    TCGv t0;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32s(ctx, t0, EA);
    tcg_gen_ext_tl_i64(cpu_fpr[rD(ctx->opcode)], t0);
    tcg_temp_free(EA);
    tcg_temp_free(t0);
}

/* lfiwzx */
static void gen_lfiwzx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
}
/*** Floating-point store ***/
#define GEN_STF(name, stop, opc, type) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STUF(name, stop, opc, type) \
static void glue(gen_, name##u)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_imm_index(ctx, EA, 0); \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STUXF(name, stop, opc, type) \
static void glue(gen_, name##ux)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    if (unlikely(rA(ctx->opcode) == 0)) { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STXF(name, stop, opc2, opc3, type) \
static void glue(gen_, name##x)(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->fpu_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_FPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_FLOAT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##stop(ctx, cpu_fpr[rS(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}

#define GEN_STFS(name, stop, op, type) \
GEN_STF(name, stop, op | 0x20, type); \
GEN_STUF(name, stop, op | 0x21, type); \
GEN_STUXF(name, stop, op | 0x01, type); \
GEN_STXF(name, stop, 0x17, op | 0x00, type)

static void gen_qemu_st32fs(DisasContext *ctx, TCGv_i64 src, TCGv addr)
{
    TCGv_i32 tmp = tcg_temp_new_i32();
    gen_helper_tosingle(tmp, src);
    tcg_gen_qemu_st_i32(tmp, addr, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_temp_free_i32(tmp);
}

/* stfd stfdu stfdux stfdx */
GEN_STFS(stfd, st64_i64, 0x16, PPC_FLOAT);
/* stfs stfsu stfsux stfsx */
GEN_STFS(stfs, st32fs, 0x14, PPC_FLOAT);
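
/*
 * stfdepx below is the external-PID counterpart of stfdx: the store is
 * translated with the PPC_TLB_EPID_STORE MMU index instead of
 * ctx->mem_idx, and the instruction is privileged (CHK_SV).
 */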
/* stfdepx (external PID stfdx) */
static void gen_stfdepx(DisasContext *ctx)
{
    TCGv EA;
    CHK_SV;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(cpu_fpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE,
                        DEF_MEMOP(MO_Q));
    tcg_temp_free(EA);
}
/* stfdp */
static void gen_stfdp(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_imm_index(ctx, EA, 0);
    /* We only need to swap high and low halves. gen_qemu_st64_i64 does
       the necessary 64-bit byteswap already. */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_st64_i64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_st64_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_st64_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_st64_i64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}

/* stfdpx */
static void gen_stfdpx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->fpu_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_FPU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_FLOAT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    /* We only need to swap high and low halves. gen_qemu_st64_i64 does
       the necessary 64-bit byteswap already. */
    if (unlikely(ctx->le_mode)) {
        gen_qemu_st64_i64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_st64_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
    } else {
        gen_qemu_st64_i64(ctx, cpu_fpr[rD(ctx->opcode)], EA);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_st64_i64(ctx, cpu_fpr[rD(ctx->opcode) + 1], EA);
    }
    tcg_temp_free(EA);
}

/* Optional: */
static inline void gen_qemu_st32fiw(DisasContext *ctx, TCGv_i64 arg1, TCGv arg2)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_trunc_i64_tl(t0, arg1);
    gen_qemu_st32(ctx, t0, arg2);
    tcg_temp_free(t0);
}
/* stfiwx */
GEN_STXF(stfiw, st32fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
/* POWER2 specific instructions */
/* Quad manipulation (load/store two floats at a time) */

/* lfq */
static void gen_lfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}

/* lfqu */
static void gen_lfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_ld64_i64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

/* lfqux */
static void gen_lfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    gen_set_access_type(ctx, ACCESS_FLOAT);
    TCGv t0, t1;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, cpu_fpr[rd], t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_ld64_i64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
}

/* lfqx */
static void gen_lfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_ld64_i64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_ld64_i64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}

/* stfq */
static void gen_stfq(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_st64_i64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_st64_i64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}

/* stfqu */
static void gen_stfqu(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_imm_index(ctx, t0, 0);
    gen_qemu_st64_i64(ctx, cpu_fpr[rd], t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_st64_i64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
}

/* stfqux */
static void gen_stfqux(DisasContext *ctx)
{
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0, t1;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_st64_i64(ctx, cpu_fpr[rd], t0);
    t1 = tcg_temp_new();
    gen_addr_add(ctx, t1, t0, 8);
    gen_qemu_st64_i64(ctx, cpu_fpr[(rd + 1) % 32], t1);
    tcg_temp_free(t1);
    if (ra != 0) {
        tcg_gen_mov_tl(cpu_gpr[ra], t0);
    }
    tcg_temp_free(t0);
}

/* stfqx */
static void gen_stfqx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_FLOAT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_qemu_st64_i64(ctx, cpu_fpr[rd], t0);
    gen_addr_add(ctx, t0, t0, 8);
    gen_qemu_st64_i64(ctx, cpu_fpr[(rd + 1) % 32], t0);
    tcg_temp_free(t0);
}
#undef _GEN_FLOAT_ACB
#undef GEN_FLOAT_ACB
#undef _GEN_FLOAT_AB
#undef GEN_FLOAT_AB
#undef _GEN_FLOAT_AC
#undef GEN_FLOAT_AC
#undef GEN_FLOAT_B
#undef GEN_FLOAT_BS

#undef GEN_LDF
#undef GEN_LDUF
#undef GEN_LDUXF
#undef GEN_LDXF
#undef GEN_LDFS

#undef GEN_STF
#undef GEN_STUF
#undef GEN_STUXF
#undef GEN_STXF
#undef GEN_STFS