1 /*** VSX extension ***/
/* Copy the high (first) doubleword of VSR n into dst. */
3 static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
5 tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
/* Copy the low (second) doubleword of VSR n into dst. */
8 static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
10 tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
/* Store src into the high (first) doubleword of VSR n. */
13 static inline void set_cpu_vsrh(int n, TCGv_i64 src)
15 tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
/* Store src into the low (second) doubleword of VSR n. */
18 static inline void set_cpu_vsrl(int n, TCGv_i64 src)
20 tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
/*
 * Return a fresh TCG pointer temporary addressing the full 128-bit
 * VSR 'reg' inside cpu_env.  The caller owns the temporary and must
 * release it with tcg_temp_free_ptr().
 */
23 static inline TCGv_ptr gen_vsr_ptr(int reg)
25 TCGv_ptr r = tcg_temp_new_ptr();
26 tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
/*
 * Emit a VSX scalar load: raise a VSX-unavailable exception when
 * MSR[VSX] is clear, compute the X-form (reg+reg) effective address,
 * then run 'operation' (a gen_qemu_* load) into the high doubleword
 * of xT.  The low doubleword of xT is left untouched (undefined).
 */
30 #define VSX_LOAD_SCALAR(name, operation) \
31 static void gen_##name(DisasContext *ctx) \
35 if (unlikely(!ctx->vsx_enabled)) { \
36 gen_exception(ctx, POWERPC_EXCP_VSXU); \
39 t0 = tcg_temp_new_i64(); \
40 gen_set_access_type(ctx, ACCESS_INT); \
41 EA = tcg_temp_new(); \
42 gen_addr_reg_index(ctx, EA); \
43 gen_qemu_##operation(ctx, t0, EA); \
44 set_cpu_vsrh(xT(ctx->opcode), t0); \
45 /* NOTE: cpu_vsrl is undefined */ \
47 tcg_temp_free_i64(t0); \
/* Scalar loads: doubleword, words (sign/zero ext), byte, halfword, float. */
50 VSX_LOAD_SCALAR(lxsdx, ld64_i64)
51 VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
52 VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
53 VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
54 VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
55 VSX_LOAD_SCALAR(lxsspx, ld32fs)
/*
 * lxvd2x: load two consecutive doublewords from EA into xT
 * (high doubleword first, then low from EA+8).
 */
57 static void gen_lxvd2x(DisasContext *ctx)
61 if (unlikely(!ctx->vsx_enabled)) {
62 gen_exception(ctx, POWERPC_EXCP_VSXU);
65 t0 = tcg_temp_new_i64();
66 gen_set_access_type(ctx, ACCESS_INT);
68 gen_addr_reg_index(ctx, EA);
69 gen_qemu_ld64_i64(ctx, t0, EA);
70 set_cpu_vsrh(xT(ctx->opcode), t0);
71 tcg_gen_addi_tl(EA, EA, 8);
72 gen_qemu_ld64_i64(ctx, t0, EA);
73 set_cpu_vsrl(xT(ctx->opcode), t0);
75 tcg_temp_free_i64(t0);
/*
 * lxvw4x: load four word elements from EA into xT.  One path loads each
 * doubleword little-endian (MO_LEQ) and swaps its two 32-bit halves via
 * shift+deposit; the other path loads both doublewords big-endian
 * (MO_BEQ) directly.  (Presumably selected by ctx->le_mode; the guard
 * line is not visible here — TODO confirm.)
 */
78 static void gen_lxvw4x(DisasContext *ctx)
83 if (unlikely(!ctx->vsx_enabled)) {
84 gen_exception(ctx, POWERPC_EXCP_VSXU);
87 xth = tcg_temp_new_i64();
88 xtl = tcg_temp_new_i64();
90 gen_set_access_type(ctx, ACCESS_INT);
93 gen_addr_reg_index(ctx, EA);
95 TCGv_i64 t0 = tcg_temp_new_i64();
96 TCGv_i64 t1 = tcg_temp_new_i64();
98 tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
99 tcg_gen_shri_i64(t1, t0, 32);
100 tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
101 tcg_gen_addi_tl(EA, EA, 8);
102 tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
103 tcg_gen_shri_i64(t1, t0, 32);
104 tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
105 tcg_temp_free_i64(t0);
106 tcg_temp_free_i64(t1);
108 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
109 tcg_gen_addi_tl(EA, EA, 8);
110 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
112 set_cpu_vsrh(xT(ctx->opcode), xth);
113 set_cpu_vsrl(xT(ctx->opcode), xtl);
115 tcg_temp_free_i64(xth);
116 tcg_temp_free_i64(xtl);
/*
 * lxvwsx: load one word from EA and splat it across all four word
 * elements of xT (gvec dup over the full 16 bytes).  Registers below
 * 32 require MSR[VSX]; the upper (VMX) bank requires MSR[VEC].
 */
119 static void gen_lxvwsx(DisasContext *ctx)
124 if (xT(ctx->opcode) < 32) {
125 if (unlikely(!ctx->vsx_enabled)) {
126 gen_exception(ctx, POWERPC_EXCP_VSXU);
130 if (unlikely(!ctx->altivec_enabled)) {
131 gen_exception(ctx, POWERPC_EXCP_VPU);
136 gen_set_access_type(ctx, ACCESS_INT);
139 gen_addr_reg_index(ctx, EA);
141 data = tcg_temp_new_i32();
142 tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, MO_TEUL);
143 tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);
146 tcg_temp_free_i32(data);
/*
 * lxvdsx: load one doubleword from EA and splat it into both
 * doubleword elements of xT.
 */
149 static void gen_lxvdsx(DisasContext *ctx)
154 if (unlikely(!ctx->vsx_enabled)) {
155 gen_exception(ctx, POWERPC_EXCP_VSXU);
159 gen_set_access_type(ctx, ACCESS_INT);
162 gen_addr_reg_index(ctx, EA);
164 data = tcg_temp_new_i64();
165 tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, MO_TEQ);
166 tcg_gen_gvec_dup_i64(MO_Q, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);
169 tcg_temp_free_i64(data);
/*
 * Byte-swap each of the eight 16-bit halfwords held in the inh:inl
 * pair, writing the result to outh:outl.  outh is fully written before
 * inl is read, so calling with outh==inh and outl==inl (in place, as
 * gen_lxvh8x does) is safe.
 */
172 static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
173 TCGv_i64 inh, TCGv_i64 inl)
175 TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
176 TCGv_i64 t0 = tcg_temp_new_i64();
177 TCGv_i64 t1 = tcg_temp_new_i64();
179 /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
180 tcg_gen_and_i64(t0, inh, mask);
181 tcg_gen_shli_i64(t0, t0, 8);
182 tcg_gen_shri_i64(t1, inh, 8);
183 tcg_gen_and_i64(t1, t1, mask);
184 tcg_gen_or_i64(outh, t0, t1);
186 /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
187 tcg_gen_and_i64(t0, inl, mask);
188 tcg_gen_shli_i64(t0, t0, 8);
189 tcg_gen_shri_i64(t1, inl, 8);
190 tcg_gen_and_i64(t1, t1, mask);
191 tcg_gen_or_i64(outl, t0, t1);
193 tcg_temp_free_i64(t0);
194 tcg_temp_free_i64(t1);
195 tcg_temp_free_i64(mask);
/*
 * Byte-swap each of the four 32-bit words in inh:inl -> outh:outl:
 * bswap64 each doubleword, then exchange its two word halves with
 * shift+deposit to restore the original word order.
 */
198 static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
199 TCGv_i64 inh, TCGv_i64 inl)
201 TCGv_i64 hi = tcg_temp_new_i64();
202 TCGv_i64 lo = tcg_temp_new_i64();
204 tcg_gen_bswap64_i64(hi, inh);
205 tcg_gen_bswap64_i64(lo, inl);
206 tcg_gen_shri_i64(outh, hi, 32);
207 tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
208 tcg_gen_shri_i64(outl, lo, 32);
209 tcg_gen_deposit_i64(outl, outl, lo, 32, 32);
211 tcg_temp_free_i64(hi);
212 tcg_temp_free_i64(lo);
/*
 * lxvh8x: load eight halfword elements from EA into xT.  Both
 * doublewords are loaded big-endian, then gen_bswap16x8 corrects the
 * halfword byte order (presumably only when ctx->le_mode — the guard
 * line is not visible here; TODO confirm).
 */
214 static void gen_lxvh8x(DisasContext *ctx)
220 if (unlikely(!ctx->vsx_enabled)) {
221 gen_exception(ctx, POWERPC_EXCP_VSXU);
224 xth = tcg_temp_new_i64();
225 xtl = tcg_temp_new_i64();
226 gen_set_access_type(ctx, ACCESS_INT);
229 gen_addr_reg_index(ctx, EA);
230 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
231 tcg_gen_addi_tl(EA, EA, 8);
232 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
234 gen_bswap16x8(xth, xtl, xth, xtl);
236 set_cpu_vsrh(xT(ctx->opcode), xth);
237 set_cpu_vsrl(xT(ctx->opcode), xtl);
239 tcg_temp_free_i64(xth);
240 tcg_temp_free_i64(xtl);
/*
 * lxvb16x: load sixteen byte elements from EA into xT.  Byte element
 * order is host-endian independent, so two big-endian doubleword
 * loads suffice with no swapping.
 */
243 static void gen_lxvb16x(DisasContext *ctx)
249 if (unlikely(!ctx->vsx_enabled)) {
250 gen_exception(ctx, POWERPC_EXCP_VSXU);
253 xth = tcg_temp_new_i64();
254 xtl = tcg_temp_new_i64();
255 gen_set_access_type(ctx, ACCESS_INT);
257 gen_addr_reg_index(ctx, EA);
258 tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
259 tcg_gen_addi_tl(EA, EA, 8);
260 tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
261 set_cpu_vsrh(xT(ctx->opcode), xth);
262 set_cpu_vsrl(xT(ctx->opcode), xtl);
264 tcg_temp_free_i64(xth);
265 tcg_temp_free_i64(xtl);
/*
 * Emit a full 16-byte vector load (lxv / lxvx).  'indexed' selects the
 * X-form reg+reg EA (and the xT register field) versus the DQ-form
 * immediate EA (and the DQxT field).  In LE mode the low doubleword is
 * loaded first with MO_LEQ; otherwise the high doubleword is loaded
 * first with MO_BEQ.  Registers below 32 need MSR[VSX], the upper
 * bank needs MSR[VEC].
 */
268 #define VSX_VECTOR_LOAD(name, op, indexed) \
269 static void gen_##name(DisasContext *ctx) \
277 xt = xT(ctx->opcode); \
279 xt = DQxT(ctx->opcode); \
283 if (unlikely(!ctx->vsx_enabled)) { \
284 gen_exception(ctx, POWERPC_EXCP_VSXU); \
288 if (unlikely(!ctx->altivec_enabled)) { \
289 gen_exception(ctx, POWERPC_EXCP_VPU); \
293 xth = tcg_temp_new_i64(); \
294 xtl = tcg_temp_new_i64(); \
295 gen_set_access_type(ctx, ACCESS_INT); \
296 EA = tcg_temp_new(); \
298 gen_addr_reg_index(ctx, EA); \
300 gen_addr_imm_index(ctx, EA, 0x0F); \
302 if (ctx->le_mode) { \
303 tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ); \
304 set_cpu_vsrl(xt, xtl); \
305 tcg_gen_addi_tl(EA, EA, 8); \
306 tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ); \
307 set_cpu_vsrh(xt, xth); \
309 tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ); \
310 set_cpu_vsrh(xt, xth); \
311 tcg_gen_addi_tl(EA, EA, 8); \
312 tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ); \
313 set_cpu_vsrl(xt, xtl); \
316 tcg_temp_free_i64(xth); \
317 tcg_temp_free_i64(xtl); \
/* lxv is DQ-form (immediate), lxvx is X-form (indexed). */
320 VSX_VECTOR_LOAD(lxv, ld_i64, 0)
321 VSX_VECTOR_LOAD(lxvx, ld_i64, 1)
/*
 * Emit a full 16-byte vector store (stxv / stxvx); mirror image of
 * VSX_VECTOR_LOAD: reads both doublewords of the source VSR, then
 * stores low-first with MO_LEQ in LE mode, high-first with MO_BEQ
 * otherwise.
 */
323 #define VSX_VECTOR_STORE(name, op, indexed) \
324 static void gen_##name(DisasContext *ctx) \
332 xt = xT(ctx->opcode); \
334 xt = DQxT(ctx->opcode); \
338 if (unlikely(!ctx->vsx_enabled)) { \
339 gen_exception(ctx, POWERPC_EXCP_VSXU); \
343 if (unlikely(!ctx->altivec_enabled)) { \
344 gen_exception(ctx, POWERPC_EXCP_VPU); \
348 xth = tcg_temp_new_i64(); \
349 xtl = tcg_temp_new_i64(); \
350 get_cpu_vsrh(xth, xt); \
351 get_cpu_vsrl(xtl, xt); \
352 gen_set_access_type(ctx, ACCESS_INT); \
353 EA = tcg_temp_new(); \
355 gen_addr_reg_index(ctx, EA); \
357 gen_addr_imm_index(ctx, EA, 0x0F); \
359 if (ctx->le_mode) { \
360 tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ); \
361 tcg_gen_addi_tl(EA, EA, 8); \
362 tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ); \
364 tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ); \
365 tcg_gen_addi_tl(EA, EA, 8); \
366 tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ); \
369 tcg_temp_free_i64(xth); \
370 tcg_temp_free_i64(xtl); \
/* stxv is DQ-form (immediate), stxvx is X-form (indexed). */
373 VSX_VECTOR_STORE(stxv, st_i64, 0)
374 VSX_VECTOR_STORE(stxvx, st_i64, 1)
/*
 * Emit a variable-length vector load/store (lxvl/lxvll/stxvl/stxvll).
 * The EA comes from the base register only (gen_addr_register) and the
 * byte count from GPR rB; the heavy lifting is done by the matching
 * out-of-line helper.
 */
377 #define VSX_VECTOR_LOAD_STORE_LENGTH(name) \
378 static void gen_##name(DisasContext *ctx) \
383 if (xT(ctx->opcode) < 32) { \
384 if (unlikely(!ctx->vsx_enabled)) { \
385 gen_exception(ctx, POWERPC_EXCP_VSXU); \
389 if (unlikely(!ctx->altivec_enabled)) { \
390 gen_exception(ctx, POWERPC_EXCP_VPU); \
394 EA = tcg_temp_new(); \
395 xt = gen_vsr_ptr(xT(ctx->opcode)); \
396 gen_set_access_type(ctx, ACCESS_INT); \
397 gen_addr_register(ctx, EA); \
398 gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]); \
400 tcg_temp_free_ptr(xt); \
/* Load/store with length (left-to-right and left-justified variants). */
403 VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
404 VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
405 VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
406 VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
/*
 * Emit a DS-form scalar load into the high doubleword of VSR rD+32
 * (the VMX-overlapping bank, hence the MSR[VEC] gate).  The low
 * doubleword is left undefined.
 */
409 #define VSX_LOAD_SCALAR_DS(name, operation) \
410 static void gen_##name(DisasContext *ctx) \
415 if (unlikely(!ctx->altivec_enabled)) { \
416 gen_exception(ctx, POWERPC_EXCP_VPU); \
419 xth = tcg_temp_new_i64(); \
420 gen_set_access_type(ctx, ACCESS_INT); \
421 EA = tcg_temp_new(); \
422 gen_addr_imm_index(ctx, EA, 0x03); \
423 gen_qemu_##operation(ctx, xth, EA); \
424 set_cpu_vsrh(rD(ctx->opcode) + 32, xth); \
425 /* NOTE: cpu_vsrl is undefined */ \
427 tcg_temp_free_i64(xth); \
/* DS-form scalar loads: doubleword and single-precision float. */
430 VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
431 VSX_LOAD_SCALAR_DS(lxssp, ld32fs)
/*
 * Emit a VSX scalar store: compute the X-form EA, fetch the high
 * doubleword of xS and run 'operation' (a gen_qemu_* store) on it.
 */
433 #define VSX_STORE_SCALAR(name, operation) \
434 static void gen_##name(DisasContext *ctx) \
438 if (unlikely(!ctx->vsx_enabled)) { \
439 gen_exception(ctx, POWERPC_EXCP_VSXU); \
442 t0 = tcg_temp_new_i64(); \
443 gen_set_access_type(ctx, ACCESS_INT); \
444 EA = tcg_temp_new(); \
445 gen_addr_reg_index(ctx, EA); \
446 get_cpu_vsrh(t0, xS(ctx->opcode)); \
447 gen_qemu_##operation(ctx, t0, EA); \
449 tcg_temp_free_i64(t0); \
/* Scalar stores: doubleword, byte, halfword, word, float. */
452 VSX_STORE_SCALAR(stxsdx, st64_i64)
454 VSX_STORE_SCALAR(stxsibx, st8_i64)
455 VSX_STORE_SCALAR(stxsihx, st16_i64)
456 VSX_STORE_SCALAR(stxsiwx, st32_i64)
457 VSX_STORE_SCALAR(stxsspx, st32fs)
/*
 * stxvd2x: store the two doublewords of xS to EA (high first,
 * then low at EA+8).
 */
459 static void gen_stxvd2x(DisasContext *ctx)
463 if (unlikely(!ctx->vsx_enabled)) {
464 gen_exception(ctx, POWERPC_EXCP_VSXU);
467 t0 = tcg_temp_new_i64();
468 gen_set_access_type(ctx, ACCESS_INT);
470 gen_addr_reg_index(ctx, EA);
471 get_cpu_vsrh(t0, xS(ctx->opcode));
472 gen_qemu_st64_i64(ctx, t0, EA);
473 tcg_gen_addi_tl(EA, EA, 8);
474 get_cpu_vsrl(t0, xS(ctx->opcode));
475 gen_qemu_st64_i64(ctx, t0, EA);
477 tcg_temp_free_i64(t0);
/*
 * stxvw4x: store the four word elements of xS.  One path swaps the two
 * words of each doubleword (shift+deposit) and stores MO_LEQ; the
 * other stores both doublewords MO_BEQ directly.  (Presumably selected
 * by ctx->le_mode; the guard line is not visible here — TODO confirm.)
 */
480 static void gen_stxvw4x(DisasContext *ctx)
486 if (unlikely(!ctx->vsx_enabled)) {
487 gen_exception(ctx, POWERPC_EXCP_VSXU);
490 xsh = tcg_temp_new_i64();
491 xsl = tcg_temp_new_i64();
492 get_cpu_vsrh(xsh, xS(ctx->opcode));
493 get_cpu_vsrl(xsl, xS(ctx->opcode));
494 gen_set_access_type(ctx, ACCESS_INT);
496 gen_addr_reg_index(ctx, EA);
498 TCGv_i64 t0 = tcg_temp_new_i64();
499 TCGv_i64 t1 = tcg_temp_new_i64();
501 tcg_gen_shri_i64(t0, xsh, 32);
502 tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
503 tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
504 tcg_gen_addi_tl(EA, EA, 8);
505 tcg_gen_shri_i64(t0, xsl, 32);
506 tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
507 tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
508 tcg_temp_free_i64(t0);
509 tcg_temp_free_i64(t1);
511 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
512 tcg_gen_addi_tl(EA, EA, 8);
513 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
516 tcg_temp_free_i64(xsh);
517 tcg_temp_free_i64(xsl);
/*
 * stxvh8x: store the eight halfword elements of xS.  One path
 * byte-swaps the halfwords via gen_bswap16x8 into temporaries before
 * storing MO_BEQ; the other stores xS directly MO_BEQ.  (Presumably
 * selected by ctx->le_mode; guard line not visible — TODO confirm.)
 */
520 static void gen_stxvh8x(DisasContext *ctx)
526 if (unlikely(!ctx->vsx_enabled)) {
527 gen_exception(ctx, POWERPC_EXCP_VSXU);
530 xsh = tcg_temp_new_i64();
531 xsl = tcg_temp_new_i64();
532 get_cpu_vsrh(xsh, xS(ctx->opcode));
533 get_cpu_vsrl(xsl, xS(ctx->opcode));
534 gen_set_access_type(ctx, ACCESS_INT);
536 gen_addr_reg_index(ctx, EA);
538 TCGv_i64 outh = tcg_temp_new_i64();
539 TCGv_i64 outl = tcg_temp_new_i64();
541 gen_bswap16x8(outh, outl, xsh, xsl);
542 tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
543 tcg_gen_addi_tl(EA, EA, 8);
544 tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
545 tcg_temp_free_i64(outh);
546 tcg_temp_free_i64(outl);
548 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
549 tcg_gen_addi_tl(EA, EA, 8);
550 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
553 tcg_temp_free_i64(xsh);
554 tcg_temp_free_i64(xsl);
/*
 * stxvb16x: store the sixteen byte elements of xS.  Byte element order
 * is endian-independent, so two big-endian doubleword stores suffice.
 */
557 static void gen_stxvb16x(DisasContext *ctx)
563 if (unlikely(!ctx->vsx_enabled)) {
564 gen_exception(ctx, POWERPC_EXCP_VSXU);
567 xsh = tcg_temp_new_i64();
568 xsl = tcg_temp_new_i64();
569 get_cpu_vsrh(xsh, xS(ctx->opcode));
570 get_cpu_vsrl(xsl, xS(ctx->opcode));
571 gen_set_access_type(ctx, ACCESS_INT);
573 gen_addr_reg_index(ctx, EA);
574 tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
575 tcg_gen_addi_tl(EA, EA, 8);
576 tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
578 tcg_temp_free_i64(xsh);
579 tcg_temp_free_i64(xsl);
/*
 * Emit a DS-form scalar store from the high doubleword of VSR rD+32
 * (VMX bank, hence the MSR[VEC] gate).
 */
582 #define VSX_STORE_SCALAR_DS(name, operation) \
583 static void gen_##name(DisasContext *ctx) \
588 if (unlikely(!ctx->altivec_enabled)) { \
589 gen_exception(ctx, POWERPC_EXCP_VPU); \
592 xth = tcg_temp_new_i64(); \
593 get_cpu_vsrh(xth, rD(ctx->opcode) + 32); \
594 gen_set_access_type(ctx, ACCESS_INT); \
595 EA = tcg_temp_new(); \
596 gen_addr_imm_index(ctx, EA, 0x03); \
597 gen_qemu_##operation(ctx, xth, EA); \
598 /* NOTE: cpu_vsrl is undefined */ \
600 tcg_temp_free_i64(xth); \
/* DS-form scalar stores: doubleword and single-precision float. */
603 VSX_STORE_SCALAR_DS(stxsd, st64_i64)
604 VSX_STORE_SCALAR_DS(stxssp, st32fs)
/*
 * mfvsrwz: move the low word of xS's high doubleword to GPR rA,
 * zero-extended.  Registers below 32 are gated on MSR[FP], the upper
 * bank on MSR[VEC].
 */
606 static void gen_mfvsrwz(DisasContext *ctx)
608 if (xS(ctx->opcode) < 32) {
609 if (unlikely(!ctx->fpu_enabled)) {
610 gen_exception(ctx, POWERPC_EXCP_FPU);
614 if (unlikely(!ctx->altivec_enabled)) {
615 gen_exception(ctx, POWERPC_EXCP_VPU);
619 TCGv_i64 tmp = tcg_temp_new_i64();
620 TCGv_i64 xsh = tcg_temp_new_i64();
621 get_cpu_vsrh(xsh, xS(ctx->opcode));
622 tcg_gen_ext32u_i64(tmp, xsh);
623 tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
624 tcg_temp_free_i64(tmp);
625 tcg_temp_free_i64(xsh);
/*
 * mtvsrwa: move GPR rA's low word, sign-extended to 64 bits, into the
 * high doubleword of xT.  (xS and xT decode the same instruction
 * field, so the xS-based availability check covers xT.)
 */
628 static void gen_mtvsrwa(DisasContext *ctx)
630 if (xS(ctx->opcode) < 32) {
631 if (unlikely(!ctx->fpu_enabled)) {
632 gen_exception(ctx, POWERPC_EXCP_FPU);
636 if (unlikely(!ctx->altivec_enabled)) {
637 gen_exception(ctx, POWERPC_EXCP_VPU);
641 TCGv_i64 tmp = tcg_temp_new_i64();
642 TCGv_i64 xsh = tcg_temp_new_i64();
643 tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
644 tcg_gen_ext32s_i64(xsh, tmp);
645 set_cpu_vsrh(xT(ctx->opcode), xsh);
646 tcg_temp_free_i64(tmp);
647 tcg_temp_free_i64(xsh);
/*
 * mtvsrwz: move GPR rA's low word, zero-extended to 64 bits, into the
 * high doubleword of xT.  Same gating as mtvsrwa.
 */
650 static void gen_mtvsrwz(DisasContext *ctx)
652 if (xS(ctx->opcode) < 32) {
653 if (unlikely(!ctx->fpu_enabled)) {
654 gen_exception(ctx, POWERPC_EXCP_FPU);
658 if (unlikely(!ctx->altivec_enabled)) {
659 gen_exception(ctx, POWERPC_EXCP_VPU);
663 TCGv_i64 tmp = tcg_temp_new_i64();
664 TCGv_i64 xsh = tcg_temp_new_i64();
665 tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
666 tcg_gen_ext32u_i64(xsh, tmp);
667 set_cpu_vsrh(xT(ctx->opcode), xsh);
668 tcg_temp_free_i64(tmp);
669 tcg_temp_free_i64(xsh);
672 #if defined(TARGET_PPC64)
/* mfvsrd (64-bit only): move the high doubleword of xS to GPR rA. */
673 static void gen_mfvsrd(DisasContext *ctx)
676 if (xS(ctx->opcode) < 32) {
677 if (unlikely(!ctx->fpu_enabled)) {
678 gen_exception(ctx, POWERPC_EXCP_FPU);
682 if (unlikely(!ctx->altivec_enabled)) {
683 gen_exception(ctx, POWERPC_EXCP_VPU);
687 t0 = tcg_temp_new_i64();
688 get_cpu_vsrh(t0, xS(ctx->opcode));
689 tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
690 tcg_temp_free_i64(t0);
/* mtvsrd (64-bit only): move GPR rA into the high doubleword of xT. */
693 static void gen_mtvsrd(DisasContext *ctx)
696 if (xS(ctx->opcode) < 32) {
697 if (unlikely(!ctx->fpu_enabled)) {
698 gen_exception(ctx, POWERPC_EXCP_FPU);
702 if (unlikely(!ctx->altivec_enabled)) {
703 gen_exception(ctx, POWERPC_EXCP_VPU);
707 t0 = tcg_temp_new_i64();
708 tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
709 set_cpu_vsrh(xT(ctx->opcode), t0);
710 tcg_temp_free_i64(t0);
/* mfvsrld (64-bit only): move the LOW doubleword of xS to GPR rA. */
713 static void gen_mfvsrld(DisasContext *ctx)
716 if (xS(ctx->opcode) < 32) {
717 if (unlikely(!ctx->vsx_enabled)) {
718 gen_exception(ctx, POWERPC_EXCP_VSXU);
722 if (unlikely(!ctx->altivec_enabled)) {
723 gen_exception(ctx, POWERPC_EXCP_VPU);
727 t0 = tcg_temp_new_i64();
728 get_cpu_vsrl(t0, xS(ctx->opcode));
729 tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
730 tcg_temp_free_i64(t0);
/*
 * mtvsrdd (64-bit only): build xT from two GPRs — rA (or zero when the
 * rA field is 0) into the high doubleword, rB into the low doubleword.
 */
733 static void gen_mtvsrdd(DisasContext *ctx)
736 if (xT(ctx->opcode) < 32) {
737 if (unlikely(!ctx->vsx_enabled)) {
738 gen_exception(ctx, POWERPC_EXCP_VSXU);
742 if (unlikely(!ctx->altivec_enabled)) {
743 gen_exception(ctx, POWERPC_EXCP_VPU);
748 t0 = tcg_temp_new_i64();
749 if (!rA(ctx->opcode)) {
750 tcg_gen_movi_i64(t0, 0);
752 tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
754 set_cpu_vsrh(xT(ctx->opcode), t0);
756 tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
757 set_cpu_vsrl(xT(ctx->opcode), t0);
758 tcg_temp_free_i64(t0);
/*
 * mtvsrws (64-bit only): splat the low word of GPR rA into all four
 * word elements of xT (deposit duplicates the word within a
 * doubleword, which is then written to both halves).
 */
761 static void gen_mtvsrws(DisasContext *ctx)
764 if (xT(ctx->opcode) < 32) {
765 if (unlikely(!ctx->vsx_enabled)) {
766 gen_exception(ctx, POWERPC_EXCP_VSXU);
770 if (unlikely(!ctx->altivec_enabled)) {
771 gen_exception(ctx, POWERPC_EXCP_VPU);
776 t0 = tcg_temp_new_i64();
777 tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
778 cpu_gpr[rA(ctx->opcode)], 32, 32);
779 set_cpu_vsrl(xT(ctx->opcode), t0);
780 set_cpu_vsrh(xT(ctx->opcode), t0);
781 tcg_temp_free_i64(t0);
/*
 * xxpermdi: doubleword permute.  DM bit 1 selects xA's high/low
 * doubleword for xT's high half; DM bit 0 selects xB's high/low
 * doubleword for xT's low half.  When xT aliases xA or xB, both
 * source doublewords are read into temps before either half of xT
 * is written; otherwise each half is copied through directly.
 */
786 static void gen_xxpermdi(DisasContext *ctx)
790 if (unlikely(!ctx->vsx_enabled)) {
791 gen_exception(ctx, POWERPC_EXCP_VSXU);
795 xh = tcg_temp_new_i64();
796 xl = tcg_temp_new_i64();
798 if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
799 (xT(ctx->opcode) == xB(ctx->opcode)))) {
800 if ((DM(ctx->opcode) & 2) == 0) {
801 get_cpu_vsrh(xh, xA(ctx->opcode));
803 get_cpu_vsrl(xh, xA(ctx->opcode));
805 if ((DM(ctx->opcode) & 1) == 0) {
806 get_cpu_vsrh(xl, xB(ctx->opcode));
808 get_cpu_vsrl(xl, xB(ctx->opcode));
811 set_cpu_vsrh(xT(ctx->opcode), xh);
812 set_cpu_vsrl(xT(ctx->opcode), xl);
814 if ((DM(ctx->opcode) & 2) == 0) {
815 get_cpu_vsrh(xh, xA(ctx->opcode));
816 set_cpu_vsrh(xT(ctx->opcode), xh);
818 get_cpu_vsrl(xh, xA(ctx->opcode));
819 set_cpu_vsrh(xT(ctx->opcode), xh);
821 if ((DM(ctx->opcode) & 1) == 0) {
822 get_cpu_vsrh(xl, xB(ctx->opcode));
823 set_cpu_vsrl(xT(ctx->opcode), xl);
825 get_cpu_vsrl(xl, xB(ctx->opcode));
826 set_cpu_vsrl(xT(ctx->opcode), xl);
829 tcg_temp_free_i64(xh);
830 tcg_temp_free_i64(xl);
/* Sign-bit masks: one DP lane, or two SP lanes, per 64-bit half. */
837 #define SGN_MASK_DP 0x8000000000000000ull
838 #define SGN_MASK_SP 0x8000000080000000ull
/*
 * Emit a scalar DP sign-bit operation on the high doubleword of xB,
 * writing xT: andc clears the sign (ABS), or sets it (NABS), xor
 * flips it (NEG), and the xa path copies the sign bit from xA
 * (CPSGN).  sgn_mask selects which bit(s) are affected.
 */
840 #define VSX_SCALAR_MOVE(name, op, sgn_mask) \
841 static void glue(gen_, name)(DisasContext *ctx) \
844 if (unlikely(!ctx->vsx_enabled)) { \
845 gen_exception(ctx, POWERPC_EXCP_VSXU); \
848 xb = tcg_temp_new_i64(); \
849 sgm = tcg_temp_new_i64(); \
850 get_cpu_vsrh(xb, xB(ctx->opcode)); \
851 tcg_gen_movi_i64(sgm, sgn_mask); \
854 tcg_gen_andc_i64(xb, xb, sgm); \
858 tcg_gen_or_i64(xb, xb, sgm); \
862 tcg_gen_xor_i64(xb, xb, sgm); \
866 TCGv_i64 xa = tcg_temp_new_i64(); \
867 get_cpu_vsrh(xa, xA(ctx->opcode)); \
868 tcg_gen_and_i64(xa, xa, sgm); \
869 tcg_gen_andc_i64(xb, xb, sgm); \
870 tcg_gen_or_i64(xb, xb, xa); \
871 tcg_temp_free_i64(xa); \
875 set_cpu_vsrh(xT(ctx->opcode), xb); \
876 tcg_temp_free_i64(xb); \
877 tcg_temp_free_i64(sgm); \
/* Scalar DP abs/nabs/neg/copy-sign. */
880 VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
881 VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
882 VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
883 VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)
/*
 * Quad-precision variant of VSX_SCALAR_MOVE.  Registers come from the
 * rD/rB (+32) fields; the sign manipulation touches only the high
 * doubleword (where the QP sign bit lives) while the low doubleword
 * is carried through unchanged.
 */
885 #define VSX_SCALAR_MOVE_QP(name, op, sgn_mask) \
886 static void glue(gen_, name)(DisasContext *ctx) \
889 int xt = rD(ctx->opcode) + 32; \
890 int xb = rB(ctx->opcode) + 32; \
891 TCGv_i64 xah, xbh, xbl, sgm, tmp; \
893 if (unlikely(!ctx->vsx_enabled)) { \
894 gen_exception(ctx, POWERPC_EXCP_VSXU); \
897 xbh = tcg_temp_new_i64(); \
898 xbl = tcg_temp_new_i64(); \
899 sgm = tcg_temp_new_i64(); \
900 tmp = tcg_temp_new_i64(); \
901 get_cpu_vsrh(xbh, xb); \
902 get_cpu_vsrl(xbl, xb); \
903 tcg_gen_movi_i64(sgm, sgn_mask); \
906 tcg_gen_andc_i64(xbh, xbh, sgm); \
909 tcg_gen_or_i64(xbh, xbh, sgm); \
912 tcg_gen_xor_i64(xbh, xbh, sgm); \
915 xah = tcg_temp_new_i64(); \
916 xa = rA(ctx->opcode) + 32; \
917 get_cpu_vsrh(tmp, xa); \
918 tcg_gen_and_i64(xah, tmp, sgm); \
919 tcg_gen_andc_i64(xbh, xbh, sgm); \
920 tcg_gen_or_i64(xbh, xbh, xah); \
921 tcg_temp_free_i64(xah); \
924 set_cpu_vsrh(xt, xbh); \
925 set_cpu_vsrl(xt, xbl); \
926 tcg_temp_free_i64(xbl); \
927 tcg_temp_free_i64(xbh); \
928 tcg_temp_free_i64(sgm); \
929 tcg_temp_free_i64(tmp); \
/* Scalar QP abs/nabs/neg/copy-sign (sign bit is in the high dword). */
932 VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
933 VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
934 VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
935 VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)
/*
 * Vector variant of VSX_SCALAR_MOVE: the same sign-bit operation is
 * applied to both doublewords of xB (sgn_mask covers one DP lane or
 * two SP lanes per doubleword), writing xT.
 */
937 #define VSX_VECTOR_MOVE(name, op, sgn_mask) \
938 static void glue(gen_, name)(DisasContext *ctx) \
940 TCGv_i64 xbh, xbl, sgm; \
941 if (unlikely(!ctx->vsx_enabled)) { \
942 gen_exception(ctx, POWERPC_EXCP_VSXU); \
945 xbh = tcg_temp_new_i64(); \
946 xbl = tcg_temp_new_i64(); \
947 sgm = tcg_temp_new_i64(); \
948 get_cpu_vsrh(xbh, xB(ctx->opcode)); \
949 get_cpu_vsrl(xbl, xB(ctx->opcode)); \
950 tcg_gen_movi_i64(sgm, sgn_mask); \
953 tcg_gen_andc_i64(xbh, xbh, sgm); \
954 tcg_gen_andc_i64(xbl, xbl, sgm); \
958 tcg_gen_or_i64(xbh, xbh, sgm); \
959 tcg_gen_or_i64(xbl, xbl, sgm); \
963 tcg_gen_xor_i64(xbh, xbh, sgm); \
964 tcg_gen_xor_i64(xbl, xbl, sgm); \
968 TCGv_i64 xah = tcg_temp_new_i64(); \
969 TCGv_i64 xal = tcg_temp_new_i64(); \
970 get_cpu_vsrh(xah, xA(ctx->opcode)); \
971 get_cpu_vsrl(xal, xA(ctx->opcode)); \
972 tcg_gen_and_i64(xah, xah, sgm); \
973 tcg_gen_and_i64(xal, xal, sgm); \
974 tcg_gen_andc_i64(xbh, xbh, sgm); \
975 tcg_gen_andc_i64(xbl, xbl, sgm); \
976 tcg_gen_or_i64(xbh, xbh, xah); \
977 tcg_gen_or_i64(xbl, xbl, xal); \
978 tcg_temp_free_i64(xah); \
979 tcg_temp_free_i64(xal); \
983 set_cpu_vsrh(xT(ctx->opcode), xbh); \
984 set_cpu_vsrl(xT(ctx->opcode), xbl); \
985 tcg_temp_free_i64(xbh); \
986 tcg_temp_free_i64(xbl); \
987 tcg_temp_free_i64(sgm); \
/* Vector DP and SP abs/nabs/neg/copy-sign. */
990 VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
991 VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
992 VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
993 VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
994 VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
995 VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
996 VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
997 VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
/*
 * Emit a vector FP compare via an out-of-line helper taking xT, xA, xB
 * pointers.  When opcode bit 21 (the Rc-style record bit) is set the
 * helper's i32 result is written to CR6; otherwise it is computed into
 * a scratch temp and discarded.  FPSCR status is checked either way.
 */
999 #define VSX_CMP(name, op1, op2, inval, type) \
1000 static void gen_##name(DisasContext *ctx) \
1003 TCGv_ptr xt, xa, xb; \
1004 if (unlikely(!ctx->vsx_enabled)) { \
1005 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1008 xt = gen_vsr_ptr(xT(ctx->opcode)); \
1009 xa = gen_vsr_ptr(xA(ctx->opcode)); \
1010 xb = gen_vsr_ptr(xB(ctx->opcode)); \
1011 if ((ctx->opcode >> (31 - 21)) & 1) { \
1012 gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb); \
1014 ignored = tcg_temp_new_i32(); \
1015 gen_helper_##name(ignored, cpu_env, xt, xa, xb); \
1016 tcg_temp_free_i32(ignored); \
1018 gen_helper_float_check_status(cpu_env); \
1019 tcg_temp_free_ptr(xt); \
1020 tcg_temp_free_ptr(xa); \
1021 tcg_temp_free_ptr(xb); \
/*
 * Vector FP compares.  xvcmpnedp/xvcmpnesp were introduced in Power
 * ISA v3.0, so both are gated on PPC2_ISA300 (xvcmpnesp previously,
 * and inconsistently, used PPC2_VSX).
 */
1024 VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
1025 VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
1026 VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
1027 VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
1028 VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
1029 VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
1030 VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
1031 VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_ISA300)
/*
 * xscvqpdp: convert scalar quad-precision to double-precision via
 * helper; the raw opcode is passed so the helper can decode rounding
 * control.
 */
1033 static void gen_xscvqpdp(DisasContext *ctx)
1037 if (unlikely(!ctx->vsx_enabled)) {
1038 gen_exception(ctx, POWERPC_EXCP_VSXU);
1041 opc = tcg_const_i32(ctx->opcode);
1042 xt = gen_vsr_ptr(xT(ctx->opcode));
1043 xb = gen_vsr_ptr(xB(ctx->opcode));
1044 gen_helper_xscvqpdp(cpu_env, opc, xt, xb);
1045 tcg_temp_free_i32(opc);
1046 tcg_temp_free_ptr(xt);
1047 tcg_temp_free_ptr(xb);
/*
 * Helper-call shims.  This one passes only the raw opcode:
 * helper(cpu_env, opc); the helper decodes its own operands.
 */
1050 #define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \
1051 static void gen_##name(DisasContext *ctx) \
1054 if (unlikely(!ctx->vsx_enabled)) { \
1055 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1058 opc = tcg_const_i32(ctx->opcode); \
1059 gen_helper_##name(cpu_env, opc); \
1060 tcg_temp_free_i32(opc); \
/* Three-operand XX3 form: helper(cpu_env, xT, xA, xB) as VSR pointers. */
1063 #define GEN_VSX_HELPER_X3(name, op1, op2, inval, type) \
1064 static void gen_##name(DisasContext *ctx) \
1066 TCGv_ptr xt, xa, xb; \
1067 if (unlikely(!ctx->vsx_enabled)) { \
1068 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1071 xt = gen_vsr_ptr(xT(ctx->opcode)); \
1072 xa = gen_vsr_ptr(xA(ctx->opcode)); \
1073 xb = gen_vsr_ptr(xB(ctx->opcode)); \
1074 gen_helper_##name(cpu_env, xt, xa, xb); \
1075 tcg_temp_free_ptr(xt); \
1076 tcg_temp_free_ptr(xa); \
1077 tcg_temp_free_ptr(xb); \
/* Two-operand XX2 form: helper(cpu_env, xT, xB). */
1080 #define GEN_VSX_HELPER_X2(name, op1, op2, inval, type) \
1081 static void gen_##name(DisasContext *ctx) \
1084 if (unlikely(!ctx->vsx_enabled)) { \
1085 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1088 xt = gen_vsr_ptr(xT(ctx->opcode)); \
1089 xb = gen_vsr_ptr(xB(ctx->opcode)); \
1090 gen_helper_##name(cpu_env, xt, xb); \
1091 tcg_temp_free_ptr(xt); \
1092 tcg_temp_free_ptr(xb); \
/* Compare-style form with no target VSR: helper(cpu_env, opc, xA, xB). */
1095 #define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type) \
1096 static void gen_##name(DisasContext *ctx) \
1100 if (unlikely(!ctx->vsx_enabled)) { \
1101 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1104 opc = tcg_const_i32(ctx->opcode); \
1105 xa = gen_vsr_ptr(xA(ctx->opcode)); \
1106 xb = gen_vsr_ptr(xB(ctx->opcode)); \
1107 gen_helper_##name(cpu_env, opc, xa, xb); \
1108 tcg_temp_free_i32(opc); \
1109 tcg_temp_free_ptr(xa); \
1110 tcg_temp_free_ptr(xb); \
/* Single-source form: helper(cpu_env, opc, xB). */
1113 #define GEN_VSX_HELPER_X1(name, op1, op2, inval, type) \
1114 static void gen_##name(DisasContext *ctx) \
1118 if (unlikely(!ctx->vsx_enabled)) { \
1119 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1122 opc = tcg_const_i32(ctx->opcode); \
1123 xb = gen_vsr_ptr(xB(ctx->opcode)); \
1124 gen_helper_##name(cpu_env, opc, xb); \
1125 tcg_temp_free_i32(opc); \
1126 tcg_temp_free_ptr(xb); \
/*
 * Like GEN_VSX_HELPER_X3 but the registers come from the rD/rA/rB
 * fields offset by 32 (the VMX-overlapping VSR bank, used by the
 * quad-precision and ISA 3.0 scalar ops); the raw opcode is also
 * passed for rounding/record decoding.
 */
1129 #define GEN_VSX_HELPER_R3(name, op1, op2, inval, type) \
1130 static void gen_##name(DisasContext *ctx) \
1133 TCGv_ptr xt, xa, xb; \
1134 if (unlikely(!ctx->vsx_enabled)) { \
1135 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1138 opc = tcg_const_i32(ctx->opcode); \
1139 xt = gen_vsr_ptr(rD(ctx->opcode) + 32); \
1140 xa = gen_vsr_ptr(rA(ctx->opcode) + 32); \
1141 xb = gen_vsr_ptr(rB(ctx->opcode) + 32); \
1142 gen_helper_##name(cpu_env, opc, xt, xa, xb); \
1143 tcg_temp_free_i32(opc); \
1144 tcg_temp_free_ptr(xt); \
1145 tcg_temp_free_ptr(xa); \
1146 tcg_temp_free_ptr(xb); \
/* Two-operand variant of GEN_VSX_HELPER_R3: helper(cpu_env, opc, xt, xb). */
1149 #define GEN_VSX_HELPER_R2(name, op1, op2, inval, type) \
1150 static void gen_##name(DisasContext *ctx) \
1154 if (unlikely(!ctx->vsx_enabled)) { \
1155 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1158 opc = tcg_const_i32(ctx->opcode); \
1159 xt = gen_vsr_ptr(rD(ctx->opcode) + 32); \
1160 xb = gen_vsr_ptr(rB(ctx->opcode) + 32); \
1161 gen_helper_##name(cpu_env, opc, xt, xb); \
1162 tcg_temp_free_i32(opc); \
1163 tcg_temp_free_ptr(xt); \
1164 tcg_temp_free_ptr(xb); \
/* Compare-style +32-bank form with no target: helper(cpu_env, opc, xa, xb). */
1167 #define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type) \
1168 static void gen_##name(DisasContext *ctx) \
1172 if (unlikely(!ctx->vsx_enabled)) { \
1173 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1176 opc = tcg_const_i32(ctx->opcode); \
1177 xa = gen_vsr_ptr(rA(ctx->opcode) + 32); \
1178 xb = gen_vsr_ptr(rB(ctx->opcode) + 32); \
1179 gen_helper_##name(cpu_env, opc, xa, xb); \
1180 tcg_temp_free_i32(opc); \
1181 tcg_temp_free_ptr(xa); \
1182 tcg_temp_free_ptr(xb); \
/*
 * i64-valued form: read the high doubleword of xB, call
 * helper(t1, cpu_env, t0), write the result to the high doubleword
 * of xT.  The low doubleword of xT is not written.
 */
1185 #define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
1186 static void gen_##name(DisasContext *ctx) \
1190 if (unlikely(!ctx->vsx_enabled)) { \
1191 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1194 t0 = tcg_temp_new_i64(); \
1195 t1 = tcg_temp_new_i64(); \
1196 get_cpu_vsrh(t0, xB(ctx->opcode)); \
1197 gen_helper_##name(t1, cpu_env, t0); \
1198 set_cpu_vsrh(xT(ctx->opcode), t1); \
1199 tcg_temp_free_i64(t0); \
1200 tcg_temp_free_i64(t1); \
/* Scalar DP/QP arithmetic, estimate, test and compare helpers. */
1203 GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
1204 GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
1205 GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
1206 GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
1207 GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
1208 GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
1209 GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
1210 GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
1211 GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
1212 GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
1213 GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
1214 GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
1215 GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
1216 GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
1217 GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
1218 GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
1219 GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
1220 GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
1221 GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
1222 GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
/*
 * xscmpoqp/xscmpuqp are quad-precision compares introduced in Power
 * ISA v3.0 — gate on PPC2_ISA300 like the other *qp helpers, not
 * PPC2_VSX.
 */
1223 GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_ISA300)
1224 GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_ISA300)
/* Scalar min/max (classic and ISA 3.0 'c'/'j' variants). */
1225 GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
1226 GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
1227 GEN_VSX_HELPER_R3(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
1228 GEN_VSX_HELPER_R3(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
1229 GEN_VSX_HELPER_R3(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
/* xsminjdp takes op2 0x13; 0x12 would collide with xsmaxjdp above. */
1230 GEN_VSX_HELPER_R3(xsminjdp, 0x00, 0x13, 0, PPC2_ISA300)
/* Scalar conversions and rounding, then vector DP/SP arithmetic,
 * estimate, test, min/max, conversion and rounding helpers. */
1231 GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
1232 GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
1233 GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
1234 GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
1235 GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
1236 GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
1237 GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
1238 GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
1239 GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
1240 GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
1241 GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
1242 GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
1243 GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
1244 GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
1245 GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
1246 GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
1247 GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
1248 GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
1249 GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
1250 GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
1251 GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
1252 GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
1253 GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
1254 GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
1255 GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
1256 GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
1257 GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
1258 GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
1259 GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
1260 GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
1261 GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
1262 GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
1263 GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
1264 GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
1265 GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
1266 GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
1267 GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
1268 GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
1269 GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
1270 GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
1271 GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)
1273 GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
1274 GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
1275 GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
1276 GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
1277 GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
1278 GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
1279 GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
1280 GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
1281 GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
1282 GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
1283 GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
1284 GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
1285 GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
1286 GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
1287 GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
1288 GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
1289 GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
1290 GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
1291 GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
1292 GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
1293 GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
1294 GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
1295 GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
1296 GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
1297 GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)
1299 GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
1300 GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
1301 GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
1302 GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
1303 GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
1304 GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
1305 GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
1306 GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
1307 GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
1308 GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
1309 GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
1310 GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
1311 GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
1312 GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
1313 GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
1314 GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
1315 GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
1316 GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
1317 GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
1318 GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
1319 GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
1320 GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
1321 GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
1322 GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
1323 GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
1324 GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
1325 GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
1326 GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
1327 GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
1328 GEN_VSX_HELPER_X3(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
1329 GEN_VSX_HELPER_X3(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)
/*
 * Fused multiply-add translator generator.  PPC_BIT32(25) in the opcode
 * selects between the two madd forms: when set, the addend is taken from
 * xT (b = xT, c = xB); when clear, b = xB and c = xT.  The out-of-line
 * helper receives (env, xt, xa, b, c).
 * NOTE(review): the opening brace, the return after gen_exception and
 * the if/else framing around the b/c assignments are not visible in this
 * extract — no comment lines are inserted inside the macro body so the
 * backslash continuations are left untouched.
 */
1331 #define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type) \
1332 static void gen_##name(DisasContext *ctx) \
1334 TCGv_ptr xt, xa, b, c; \
1335 if (unlikely(!ctx->vsx_enabled)) { \
1336 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1339 xt = gen_vsr_ptr(xT(ctx->opcode)); \
1340 xa = gen_vsr_ptr(xA(ctx->opcode)); \
1341 if (ctx->opcode & PPC_BIT32(25)) { \
1345 b = gen_vsr_ptr(xT(ctx->opcode)); \
1346 c = gen_vsr_ptr(xB(ctx->opcode)); \
1351 b = gen_vsr_ptr(xB(ctx->opcode)); \
1352 c = gen_vsr_ptr(xT(ctx->opcode)); \
1354 gen_helper_##name(cpu_env, xt, xa, b, c); \
1355 tcg_temp_free_ptr(xt); \
1356 tcg_temp_free_ptr(xa); \
1357 tcg_temp_free_ptr(b); \
1358 tcg_temp_free_ptr(c); \
/*
 * Instantiate translators for all scalar (xs*) and vector (xv*)
 * double- and single-precision multiply-add/subtract variants.
 */
1361 GEN_VSX_HELPER_VSX_MADD(xsmadddp, 0x04, 0x04, 0x05, 0, PPC2_VSX)
1362 GEN_VSX_HELPER_VSX_MADD(xsmsubdp, 0x04, 0x06, 0x07, 0, PPC2_VSX)
1363 GEN_VSX_HELPER_VSX_MADD(xsnmadddp, 0x04, 0x14, 0x15, 0, PPC2_VSX)
1364 GEN_VSX_HELPER_VSX_MADD(xsnmsubdp, 0x04, 0x16, 0x17, 0, PPC2_VSX)
1365 GEN_VSX_HELPER_VSX_MADD(xsmaddsp, 0x04, 0x00, 0x01, 0, PPC2_VSX207)
1366 GEN_VSX_HELPER_VSX_MADD(xsmsubsp, 0x04, 0x02, 0x03, 0, PPC2_VSX207)
1367 GEN_VSX_HELPER_VSX_MADD(xsnmaddsp, 0x04, 0x10, 0x11, 0, PPC2_VSX207)
1368 GEN_VSX_HELPER_VSX_MADD(xsnmsubsp, 0x04, 0x12, 0x13, 0, PPC2_VSX207)
1369 GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
1370 GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
1371 GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
1372 GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
1373 GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
1374 GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
1375 GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
1376 GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
/*
 * xxbrd: VSX Vector Byte-Reverse Doubleword.
 * Byte-swaps each 64-bit half of VSR[XB] independently and stores the
 * result into VSR[XT].  Raises a VSX-unavailable exception when
 * MSR[VSX] is clear.
 */
1378 static void gen_xxbrd(DisasContext *ctx)
1385 if (unlikely(!ctx->vsx_enabled)) {
1386 gen_exception(ctx, POWERPC_EXCP_VSXU);
1389 xth = tcg_temp_new_i64();
1390 xtl = tcg_temp_new_i64();
1391 xbh = tcg_temp_new_i64();
1392 xbl = tcg_temp_new_i64();
/* Fetch both halves of the source register. */
1393 get_cpu_vsrh(xbh, xB(ctx->opcode));
1394 get_cpu_vsrl(xbl, xB(ctx->opcode));
/* Each doubleword is reversed in place; halves do not cross. */
1396 tcg_gen_bswap64_i64(xth, xbh);
1397 tcg_gen_bswap64_i64(xtl, xbl);
1398 set_cpu_vsrh(xT(ctx->opcode), xth);
1399 set_cpu_vsrl(xT(ctx->opcode), xtl);
1401 tcg_temp_free_i64(xth);
1402 tcg_temp_free_i64(xtl);
1403 tcg_temp_free_i64(xbh);
1404 tcg_temp_free_i64(xbl);
/*
 * xxbrh: VSX Vector Byte-Reverse Halfword.
 * Byte-swaps every 16-bit element of VSR[XB] (eight halfwords across the
 * two 64-bit halves) via the gen_bswap16x8() helper and stores the
 * result into VSR[XT].
 */
1407 static void gen_xxbrh(DisasContext *ctx)
1414 if (unlikely(!ctx->vsx_enabled)) {
1415 gen_exception(ctx, POWERPC_EXCP_VSXU);
1418 xth = tcg_temp_new_i64();
1419 xtl = tcg_temp_new_i64();
1420 xbh = tcg_temp_new_i64();
1421 xbl = tcg_temp_new_i64();
1422 get_cpu_vsrh(xbh, xB(ctx->opcode));
1423 get_cpu_vsrl(xbl, xB(ctx->opcode));
/* Swap bytes within each of the 8 halfword lanes, both halves at once. */
1425 gen_bswap16x8(xth, xtl, xbh, xbl)
1426 set_cpu_vsrh(xT(ctx->opcode), xth);
1427 set_cpu_vsrl(xT(ctx->opcode), xtl);
1429 tcg_temp_free_i64(xth);
1430 tcg_temp_free_i64(xtl);
1431 tcg_temp_free_i64(xbh);
1432 tcg_temp_free_i64(xbl);
/*
 * xxbrq: VSX Vector Byte-Reverse Quadword.
 * Reverses all 16 bytes of VSR[XB]: each 64-bit half is byte-swapped and
 * the halves are exchanged (swapped low half becomes the new high half).
 */
1435 static void gen_xxbrq(DisasContext *ctx)
1443 if (unlikely(!ctx->vsx_enabled)) {
1444 gen_exception(ctx, POWERPC_EXCP_VSXU);
1447 xth = tcg_temp_new_i64();
1448 xtl = tcg_temp_new_i64();
1449 xbh = tcg_temp_new_i64();
1450 xbl = tcg_temp_new_i64();
1451 get_cpu_vsrh(xbh, xB(ctx->opcode));
1452 get_cpu_vsrl(xbl, xB(ctx->opcode));
1453 t0 = tcg_temp_new_i64();
/*
 * t0 holds the swapped low half so the final high-half store is correct
 * even when XT == XB: xtl is written back before xbl/xbh are needed
 * again, and the high result comes from t0, not from the (possibly
 * already-overwritten) register.
 */
1455 tcg_gen_bswap64_i64(t0, xbl);
1456 tcg_gen_bswap64_i64(xtl, xbh);
1457 set_cpu_vsrl(xT(ctx->opcode), xtl);
1458 tcg_gen_mov_i64(xth, t0);
1459 set_cpu_vsrh(xT(ctx->opcode), xth);
1461 tcg_temp_free_i64(t0);
1462 tcg_temp_free_i64(xth);
1463 tcg_temp_free_i64(xtl);
1464 tcg_temp_free_i64(xbh);
1465 tcg_temp_free_i64(xbl);
/*
 * xxbrw: VSX Vector Byte-Reverse Word.
 * Byte-swaps every 32-bit element of VSR[XB] (four words across the two
 * 64-bit halves) via the gen_bswap32x4() helper and stores the result
 * into VSR[XT].
 */
1468 static void gen_xxbrw(DisasContext *ctx)
1475 if (unlikely(!ctx->vsx_enabled)) {
1476 gen_exception(ctx, POWERPC_EXCP_VSXU);
1479 xth = tcg_temp_new_i64();
1480 xtl = tcg_temp_new_i64();
1481 xbh = tcg_temp_new_i64();
1482 xbl = tcg_temp_new_i64();
1483 get_cpu_vsrh(xbh, xB(ctx->opcode));
1484 get_cpu_vsrl(xbl, xB(ctx->opcode));
/* Swap bytes within each of the 4 word lanes, both halves at once. */
1486 gen_bswap32x4(xth, xtl, xbh, xbl);
1487 set_cpu_vsrh(xT(ctx->opcode), xth);
1488 set_cpu_vsrl(xT(ctx->opcode), xtl);
1490 tcg_temp_free_i64(xth);
1491 tcg_temp_free_i64(xtl);
1492 tcg_temp_free_i64(xbh);
1493 tcg_temp_free_i64(xbl);
/*
 * 128-bit VSX logical operations, implemented with the generic-vector
 * (gvec) API: tcg_op is a tcg_gen_gvec_* three-operand routine applied
 * to the full 16-byte register at vsr_full_offset(), with oprsz and
 * maxsz both 16.  No comment lines are inserted inside the macro so its
 * backslash continuations are left untouched.
 */
1496 #define VSX_LOGICAL(name, vece, tcg_op) \
1497 static void glue(gen_, name)(DisasContext *ctx) \
1499 if (unlikely(!ctx->vsx_enabled)) { \
1500 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1503 tcg_op(vece, vsr_full_offset(xT(ctx->opcode)), \
1504 vsr_full_offset(xA(ctx->opcode)), \
1505 vsr_full_offset(xB(ctx->opcode)), 16, 16); \
1508 VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
1509 VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
1510 VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
1511 VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
1512 VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
1513 VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
1514 VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
1515 VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
/*
 * xxmrghw / xxmrglw: merge the high (high=1) or low (high=0) word
 * elements of VSR[XA] and VSR[XB] into VSR[XT].  a0/b0 supply the upper
 * result doubleword (after a 32-bit right shift), a1/b1 the lower, and
 * each result doubleword is built with a 32-bit deposit of an xA word
 * over an xB word.
 * NOTE(review): the extract shows both the vsrh and vsrl load groups
 * unconditionally — presumably the original selects one group with
 * `if (high) { ... } else { ... }` on lines not visible here; confirm
 * against the full file.  No comment lines are inserted inside the
 * macro body so the backslash continuations are left untouched.
 */
1517 #define VSX_XXMRG(name, high) \
1518 static void glue(gen_, name)(DisasContext *ctx) \
1520 TCGv_i64 a0, a1, b0, b1, tmp; \
1521 if (unlikely(!ctx->vsx_enabled)) { \
1522 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1525 a0 = tcg_temp_new_i64(); \
1526 a1 = tcg_temp_new_i64(); \
1527 b0 = tcg_temp_new_i64(); \
1528 b1 = tcg_temp_new_i64(); \
1529 tmp = tcg_temp_new_i64(); \
1531 get_cpu_vsrh(a0, xA(ctx->opcode)); \
1532 get_cpu_vsrh(a1, xA(ctx->opcode)); \
1533 get_cpu_vsrh(b0, xB(ctx->opcode)); \
1534 get_cpu_vsrh(b1, xB(ctx->opcode)); \
1536 get_cpu_vsrl(a0, xA(ctx->opcode)); \
1537 get_cpu_vsrl(a1, xA(ctx->opcode)); \
1538 get_cpu_vsrl(b0, xB(ctx->opcode)); \
1539 get_cpu_vsrl(b1, xB(ctx->opcode)); \
1541 tcg_gen_shri_i64(a0, a0, 32); \
1542 tcg_gen_shri_i64(b0, b0, 32); \
1543 tcg_gen_deposit_i64(tmp, b0, a0, 32, 32); \
1544 set_cpu_vsrh(xT(ctx->opcode), tmp); \
1545 tcg_gen_deposit_i64(tmp, b1, a1, 32, 32); \
1546 set_cpu_vsrl(xT(ctx->opcode), tmp); \
1547 tcg_temp_free_i64(a0); \
1548 tcg_temp_free_i64(a1); \
1549 tcg_temp_free_i64(b0); \
1550 tcg_temp_free_i64(b1); \
1551 tcg_temp_free_i64(tmp); \
1554 VSX_XXMRG(xxmrghw, 1)
1555 VSX_XXMRG(xxmrglw, 0)
/*
 * xxsel: VSX bitwise select.
 * Per the gvec bitsel semantics, xT = (xC & xB) | (~xC & xA): each bit of
 * VSR[XC] chooses the corresponding bit from VSR[XB] (when 1) or VSR[XA]
 * (when 0), across the full 16-byte register.
 */
1557 static void gen_xxsel(DisasContext *ctx)
1559 int rt = xT(ctx->opcode);
1560 int ra = xA(ctx->opcode);
1561 int rb = xB(ctx->opcode);
1562 int rc = xC(ctx->opcode);
1564 if (unlikely(!ctx->vsx_enabled)) {
1565 gen_exception(ctx, POWERPC_EXCP_VSXU);
/* bitsel(dst, selector, on-ones, on-zeroes) over the whole vector. */
1568 tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(rt), vsr_full_offset(rc),
1569 vsr_full_offset(rb), vsr_full_offset(ra), 16, 16);
/*
 * xxspltw: splat word element UIM of VSR[XB] across all four word
 * elements of VSR[XT], using a gvec dup from the source word's offset.
 */
1572 static void gen_xxspltw(DisasContext *ctx)
1574 int rt = xT(ctx->opcode);
1575 int rb = xB(ctx->opcode);
1576 int uim = UIM(ctx->opcode);
1579 if (unlikely(!ctx->vsx_enabled)) {
1580 gen_exception(ctx, POWERPC_EXCP_VSXU);
1584 tofs = vsr_full_offset(rt);
1585 bofs = vsr_full_offset(rb);
/* Step to the selected 4-byte element within the register. */
1586 bofs += uim << MO_32;
/*
 * NOTE(review): a host-endian offset correction presumably follows under
 * this #ifndef; the adjustment line is not visible in this extract.
 */
1587 #ifndef HOST_WORDS_BIG_ENDIAN
1591 tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
/* Replicate the low byte of x into every byte of a 64-bit pattern. */
1594 #define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))
/*
 * xxspltib: splat the 8-bit immediate into all 16 bytes of VSR[XT].
 * NOTE(review): both the VSX and the VPU availability checks appear here;
 * presumably the original selects one based on the target register range
 * (conditions not visible in this extract) — confirm against the full
 * file.
 */
1596 static void gen_xxspltib(DisasContext *ctx)
1598 uint8_t uim8 = IMM8(ctx->opcode);
1599 int rt = xT(ctx->opcode);
1602 if (unlikely(!ctx->vsx_enabled)) {
1603 gen_exception(ctx, POWERPC_EXCP_VSXU);
1607 if (unlikely(!ctx->altivec_enabled)) {
1608 gen_exception(ctx, POWERPC_EXCP_VPU);
/* Broadcast the immediate byte across the whole 16-byte register. */
1612 tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(rt), 16, 16, uim8);
/*
 * xxsldwi: VSX Shift Left Double by Word Immediate.
 * Conceptually concatenates VSR[XA]:VSR[XB] (256 bits) and selects the
 * 128-bit window starting SHW words (SHW in 0..3) from the left.
 * Even shifts (0, 2) are plain doubleword copies; odd shifts (1, 3)
 * stitch each result doubleword from two adjacent source doublewords
 * with a 32-bit shift pair and an OR.
 */
1615 static void gen_xxsldwi(DisasContext *ctx)
1618 if (unlikely(!ctx->vsx_enabled)) {
1619 gen_exception(ctx, POWERPC_EXCP_VSXU);
1622 xth = tcg_temp_new_i64();
1623 xtl = tcg_temp_new_i64();
1625 switch (SHW(ctx->opcode)) {
/* SHW == 0: result is VSR[XA] unchanged. */
1627 get_cpu_vsrh(xth, xA(ctx->opcode));
1628 get_cpu_vsrl(xtl, xA(ctx->opcode));
/* SHW == 1: shift by one word; each half mixes two source halves. */
1632 TCGv_i64 t0 = tcg_temp_new_i64();
1633 get_cpu_vsrh(xth, xA(ctx->opcode));
1634 tcg_gen_shli_i64(xth, xth, 32);
1635 get_cpu_vsrl(t0, xA(ctx->opcode));
1636 tcg_gen_shri_i64(t0, t0, 32);
1637 tcg_gen_or_i64(xth, xth, t0);
1638 get_cpu_vsrl(xtl, xA(ctx->opcode));
1639 tcg_gen_shli_i64(xtl, xtl, 32);
1640 get_cpu_vsrh(t0, xB(ctx->opcode));
1641 tcg_gen_shri_i64(t0, t0, 32);
1642 tcg_gen_or_i64(xtl, xtl, t0);
1643 tcg_temp_free_i64(t0);
/* SHW == 2: result is xA low : xB high. */
1647 get_cpu_vsrl(xth, xA(ctx->opcode));
1648 get_cpu_vsrh(xtl, xB(ctx->opcode));
/* SHW == 3: shift by three words. */
1652 TCGv_i64 t0 = tcg_temp_new_i64();
1653 get_cpu_vsrl(xth, xA(ctx->opcode));
1654 tcg_gen_shli_i64(xth, xth, 32);
1655 get_cpu_vsrh(t0, xB(ctx->opcode));
1656 tcg_gen_shri_i64(t0, t0, 32);
1657 tcg_gen_or_i64(xth, xth, t0);
1658 get_cpu_vsrh(xtl, xB(ctx->opcode));
1659 tcg_gen_shli_i64(xtl, xtl, 32);
1660 get_cpu_vsrl(t0, xB(ctx->opcode));
1661 tcg_gen_shri_i64(t0, t0, 32);
1662 tcg_gen_or_i64(xtl, xtl, t0);
1663 tcg_temp_free_i64(t0);
1668 set_cpu_vsrh(xT(ctx->opcode), xth);
1669 set_cpu_vsrl(xT(ctx->opcode), xtl);
1671 tcg_temp_free_i64(xth);
1672 tcg_temp_free_i64(xtl);
/*
 * xxextractuw / xxinsertw translator generator.  The 4-bit immediate
 * selects the word position; the comment preserved below says values
 * above 15 are out of bounds and values above 12 are handled in the
 * helper as hardware does.  On the out-of-bounds path the target VSR is
 * zeroed (both halves written from a zeroed t1); otherwise the immediate
 * is passed to the out-of-line helper in t0.
 * NOTE(review): the bound-check condition itself and the early return
 * are not visible in this extract.  No comment lines are inserted inside
 * the macro body so the backslash continuations are left untouched.
 */
1675 #define VSX_EXTRACT_INSERT(name) \
1676 static void gen_##name(DisasContext *ctx) \
1681 uint8_t uimm = UIMM4(ctx->opcode); \
1683 if (unlikely(!ctx->vsx_enabled)) { \
1684 gen_exception(ctx, POWERPC_EXCP_VSXU); \
1687 xt = gen_vsr_ptr(xT(ctx->opcode)); \
1688 xb = gen_vsr_ptr(xB(ctx->opcode)); \
1689 t0 = tcg_temp_new_i32(); \
1690 t1 = tcg_temp_new_i64(); \
1692 * uimm > 15 out of bound and for \
1693 * uimm > 12 handle as per hardware in helper \
1696 tcg_gen_movi_i64(t1, 0); \
1697 set_cpu_vsrh(xT(ctx->opcode), t1); \
1698 set_cpu_vsrl(xT(ctx->opcode), t1); \
1701 tcg_gen_movi_i32(t0, uimm); \
1702 gen_helper_##name(cpu_env, xt, xb, t0); \
1703 tcg_temp_free_ptr(xb); \
1704 tcg_temp_free_ptr(xt); \
1705 tcg_temp_free_i32(t0); \
1706 tcg_temp_free_i64(t1); \
1709 VSX_EXTRACT_INSERT(xxextractuw)
1710 VSX_EXTRACT_INSERT(xxinsertw)
/*
 * xsxexpdp: Scalar Extract Exponent Double-Precision.
 * Copies the 11-bit biased exponent (bits 52..62 of the high doubleword
 * of VSR[XB]) into GPR[RT].
 */
1713 static void gen_xsxexpdp(DisasContext *ctx)
1715 TCGv rt = cpu_gpr[rD(ctx->opcode)];
1717 if (unlikely(!ctx->vsx_enabled)) {
1718 gen_exception(ctx, POWERPC_EXCP_VSXU);
1721 t0 = tcg_temp_new_i64();
1722 get_cpu_vsrh(t0, xB(ctx->opcode));
/* Exponent field: 11 bits starting at bit 52. */
1723 tcg_gen_extract_i64(rt, t0, 52, 11);
1724 tcg_temp_free_i64(t0);
/*
 * xsxexpqp: Scalar Extract Exponent Quad-Precision.
 * Extracts the 15-bit biased exponent (bits 48..62 of the high
 * doubleword) from VSR[rB + 32] and stores it right-justified in
 * VSR[rD + 32], zeroing the low doubleword.  The +32 offsets address
 * the FP/vector half of the VSR file used by quad-precision insns.
 */
1727 static void gen_xsxexpqp(DisasContext *ctx)
1733 if (unlikely(!ctx->vsx_enabled)) {
1734 gen_exception(ctx, POWERPC_EXCP_VSXU);
1737 xth = tcg_temp_new_i64();
1738 xtl = tcg_temp_new_i64();
1739 xbh = tcg_temp_new_i64();
1740 get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
/* Exponent field: 15 bits starting at bit 48. */
1742 tcg_gen_extract_i64(xth, xbh, 48, 15);
1743 set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
1744 tcg_gen_movi_i64(xtl, 0);
1745 set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);
1747 tcg_temp_free_i64(xbh);
1748 tcg_temp_free_i64(xth);
1749 tcg_temp_free_i64(xtl);
/*
 * xsiexpdp: Scalar Insert Exponent Double-Precision.
 * Builds the high doubleword of VSR[XT] from GPR[RA]'s sign and
 * fraction (mask 0x800FFFFFFFFFFFFF) with GPR[RB]'s low 11 bits placed
 * into the exponent field (shifted to bit 52).  The low doubleword of
 * the target is left undefined, as the preserved comment notes.
 */
1752 static void gen_xsiexpdp(DisasContext *ctx)
1755 TCGv ra = cpu_gpr[rA(ctx->opcode)];
1756 TCGv rb = cpu_gpr[rB(ctx->opcode)];
1759 if (unlikely(!ctx->vsx_enabled)) {
1760 gen_exception(ctx, POWERPC_EXCP_VSXU);
1763 t0 = tcg_temp_new_i64();
1764 xth = tcg_temp_new_i64();
/* Keep sign bit and 52-bit fraction; clear the exponent field. */
1765 tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
1766 tcg_gen_andi_i64(t0, rb, 0x7FF);
1767 tcg_gen_shli_i64(t0, t0, 52);
1768 tcg_gen_or_i64(xth, xth, t0);
1769 set_cpu_vsrh(xT(ctx->opcode), xth);
1770 /* dword[1] is undefined */
1771 tcg_temp_free_i64(t0);
1772 tcg_temp_free_i64(xth);
/*
 * xsiexpqp: Scalar Insert Exponent Quad-Precision.
 * High doubleword of VSR[rD + 32] = sign and fraction of VSR[rA + 32]
 * (mask 0x8000FFFFFFFFFFFF) with the low 15 bits of VSR[rB + 32]'s high
 * doubleword inserted as the exponent (shifted to bit 48).  The low
 * doubleword is copied through from rA unchanged.
 */
1775 static void gen_xsiexpqp(DisasContext *ctx)
1784 if (unlikely(!ctx->vsx_enabled)) {
1785 gen_exception(ctx, POWERPC_EXCP_VSXU);
1788 xth = tcg_temp_new_i64();
1789 xtl = tcg_temp_new_i64();
1790 xah = tcg_temp_new_i64();
1791 xal = tcg_temp_new_i64();
1792 get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
1793 get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
1794 xbh = tcg_temp_new_i64();
1795 get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
1796 t0 = tcg_temp_new_i64();
/* Keep sign bit and upper fraction bits; clear the 15-bit exponent. */
1798 tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
1799 tcg_gen_andi_i64(t0, xbh, 0x7FFF);
1800 tcg_gen_shli_i64(t0, t0, 48);
1801 tcg_gen_or_i64(xth, xth, t0);
1802 set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
1803 tcg_gen_mov_i64(xtl, xal);
1804 set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);
1806 tcg_temp_free_i64(t0);
1807 tcg_temp_free_i64(xth);
1808 tcg_temp_free_i64(xtl);
1809 tcg_temp_free_i64(xah);
1810 tcg_temp_free_i64(xal);
1811 tcg_temp_free_i64(xbh);
/*
 * xsxsigdp: Scalar Extract Significand Double-Precision.
 * GPR[RT] = 52-bit fraction of VSR[XB]'s high doubleword, with the
 * implicit leading bit (0x0010000000000000) set for normal numbers and
 * suppressed when the exponent is 0 (zero/denormal) or 2047
 * (infinity/NaN).
 */
1814 static void gen_xsxsigdp(DisasContext *ctx)
1816 TCGv rt = cpu_gpr[rD(ctx->opcode)];
1817 TCGv_i64 t0, t1, zr, nan, exp;
1819 if (unlikely(!ctx->vsx_enabled)) {
1820 gen_exception(ctx, POWERPC_EXCP_VSXU);
1823 exp = tcg_temp_new_i64();
1824 t0 = tcg_temp_new_i64();
1825 t1 = tcg_temp_new_i64();
1826 zr = tcg_const_i64(0);
1827 nan = tcg_const_i64(2047);
1829 get_cpu_vsrh(t1, xB(ctx->opcode));
1830 tcg_gen_extract_i64(exp, t1, 52, 11);
/* Start with the implicit bit, then zero it for exp == 0 or exp == 2047. */
1831 tcg_gen_movi_i64(t0, 0x0010000000000000)
1832 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1833 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1834 get_cpu_vsrh(t1, xB(ctx->opcode));
/* Combine the implicit-bit mask with the 52 fraction bits. */
1835 tcg_gen_deposit_i64(rt, t0, t1, 0, 52);
1837 tcg_temp_free_i64(t0);
1838 tcg_temp_free_i64(t1);
1839 tcg_temp_free_i64(exp);
1840 tcg_temp_free_i64(zr);
1841 tcg_temp_free_i64(nan);
/*
 * xsxsigqp: Scalar Extract Significand Quad-Precision.
 * VSR[rD + 32] = 112-bit fraction of VSR[rB + 32], with the implicit
 * leading bit (0x0001000000000000 in the high doubleword) set for
 * normal numbers and suppressed when the 15-bit exponent is 0 or 32767
 * (infinity/NaN).  The low fraction doubleword is copied unchanged.
 */
1844 static void gen_xsxsigqp(DisasContext *ctx)
1846 TCGv_i64 t0, zr, nan, exp;
1852 if (unlikely(!ctx->vsx_enabled)) {
1853 gen_exception(ctx, POWERPC_EXCP_VSXU);
1856 xth = tcg_temp_new_i64();
1857 xtl = tcg_temp_new_i64();
1858 xbh = tcg_temp_new_i64();
1859 xbl = tcg_temp_new_i64();
1860 get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
1861 get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
1862 exp = tcg_temp_new_i64();
1863 t0 = tcg_temp_new_i64();
1864 zr = tcg_const_i64(0);
1865 nan = tcg_const_i64(32767);
1867 tcg_gen_extract_i64(exp, xbh, 48, 15);
/* Implicit bit unless exponent is all-zeros or all-ones. */
1868 tcg_gen_movi_i64(t0, 0x0001000000000000);
1869 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
1870 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
1871 tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
1872 set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
1873 tcg_gen_mov_i64(xtl, xbl);
1874 set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);
1876 tcg_temp_free_i64(t0);
1877 tcg_temp_free_i64(exp);
1878 tcg_temp_free_i64(zr);
1879 tcg_temp_free_i64(nan);
1880 tcg_temp_free_i64(xth);
1881 tcg_temp_free_i64(xtl);
1882 tcg_temp_free_i64(xbh);
1883 tcg_temp_free_i64(xbl);
/*
 * xviexpsp: Vector Insert Exponent Single-Precision.
 * For each of the four word elements: take the sign and 23-bit fraction
 * from VSR[XA] (per-doubleword mask 0x807FFFFF807FFFFF) and insert the
 * 8-bit exponent from VSR[XB] (mask 0xFF000000FF selects the low 8 bits
 * of each word, shifted left 23 into the exponent field).  Both result
 * doublewords are computed the same way.
 */
1887 static void gen_xviexpsp(DisasContext *ctx)
1897 if (unlikely(!ctx->vsx_enabled)) {
1898 gen_exception(ctx, POWERPC_EXCP_VSXU);
1901 xth = tcg_temp_new_i64();
1902 xtl = tcg_temp_new_i64();
1903 xah = tcg_temp_new_i64();
1904 xal = tcg_temp_new_i64();
1905 xbh = tcg_temp_new_i64();
1906 xbl = tcg_temp_new_i64();
1907 get_cpu_vsrh(xah, xA(ctx->opcode));
1908 get_cpu_vsrl(xal, xA(ctx->opcode));
1909 get_cpu_vsrh(xbh, xB(ctx->opcode));
1910 get_cpu_vsrl(xbl, xB(ctx->opcode));
1911 t0 = tcg_temp_new_i64();
/* High doubleword: two single-precision lanes at once. */
1913 tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
1914 tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
1915 tcg_gen_shli_i64(t0, t0, 23);
1916 tcg_gen_or_i64(xth, xth, t0);
1917 set_cpu_vsrh(xT(ctx->opcode), xth);
/* Low doubleword: same operation on the other two lanes. */
1918 tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
1919 tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
1920 tcg_gen_shli_i64(t0, t0, 23);
1921 tcg_gen_or_i64(xtl, xtl, t0);
1922 set_cpu_vsrl(xT(ctx->opcode), xtl);
1924 tcg_temp_free_i64(t0);
1925 tcg_temp_free_i64(xth);
1926 tcg_temp_free_i64(xtl);
1927 tcg_temp_free_i64(xah);
1928 tcg_temp_free_i64(xal);
1929 tcg_temp_free_i64(xbh);
1930 tcg_temp_free_i64(xbl);
/*
 * xviexpdp: Vector Insert Exponent Double-Precision.
 * For each doubleword element: deposit the low 11 bits of VSR[XB] into
 * the exponent field (bits 52..62) of VSR[XA]'s value, keeping XA's
 * sign and fraction, and store into VSR[XT].
 */
1933 static void gen_xviexpdp(DisasContext *ctx)
1942 if (unlikely(!ctx->vsx_enabled)) {
1943 gen_exception(ctx, POWERPC_EXCP_VSXU);
1946 xth = tcg_temp_new_i64();
1947 xtl = tcg_temp_new_i64();
1948 xah = tcg_temp_new_i64();
1949 xal = tcg_temp_new_i64();
1950 xbh = tcg_temp_new_i64();
1951 xbl = tcg_temp_new_i64();
1952 get_cpu_vsrh(xah, xA(ctx->opcode));
1953 get_cpu_vsrl(xal, xA(ctx->opcode));
1954 get_cpu_vsrh(xbh, xB(ctx->opcode));
1955 get_cpu_vsrl(xbl, xB(ctx->opcode));
/* deposit(dst, base, field, pos=52, len=11) replaces only the exponent. */
1957 tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
1958 set_cpu_vsrh(xT(ctx->opcode), xth);
1960 tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
1961 set_cpu_vsrl(xT(ctx->opcode), xtl);
1963 tcg_temp_free_i64(xth);
1964 tcg_temp_free_i64(xtl);
1965 tcg_temp_free_i64(xah);
1966 tcg_temp_free_i64(xal);
1967 tcg_temp_free_i64(xbh);
1968 tcg_temp_free_i64(xbl);
/*
 * xvxexpsp: Vector Extract Exponent Single-Precision.
 * For each word element of VSR[XB], right-justify its 8-bit exponent in
 * the corresponding word of VSR[XT]: shift each doubleword right by 23
 * and mask with 0xFF000000FF to isolate the two per-word exponents.
 */
1971 static void gen_xvxexpsp(DisasContext *ctx)
1978 if (unlikely(!ctx->vsx_enabled)) {
1979 gen_exception(ctx, POWERPC_EXCP_VSXU);
1982 xth = tcg_temp_new_i64();
1983 xtl = tcg_temp_new_i64();
1984 xbh = tcg_temp_new_i64();
1985 xbl = tcg_temp_new_i64();
1986 get_cpu_vsrh(xbh, xB(ctx->opcode));
1987 get_cpu_vsrl(xbl, xB(ctx->opcode));
1989 tcg_gen_shri_i64(xth, xbh, 23);
1990 tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
1991 set_cpu_vsrh(xT(ctx->opcode), xth);
1992 tcg_gen_shri_i64(xtl, xbl, 23);
1993 tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
1994 set_cpu_vsrl(xT(ctx->opcode), xtl);
1996 tcg_temp_free_i64(xth);
1997 tcg_temp_free_i64(xtl);
1998 tcg_temp_free_i64(xbh);
1999 tcg_temp_free_i64(xbl);
/*
 * xvxexpdp: Vector Extract Exponent Double-Precision.
 * For each doubleword element of VSR[XB], extract the 11-bit exponent
 * (bits 52..62) right-justified into the corresponding doubleword of
 * VSR[XT].
 */
2002 static void gen_xvxexpdp(DisasContext *ctx)
2009 if (unlikely(!ctx->vsx_enabled)) {
2010 gen_exception(ctx, POWERPC_EXCP_VSXU);
2013 xth = tcg_temp_new_i64();
2014 xtl = tcg_temp_new_i64();
2015 xbh = tcg_temp_new_i64();
2016 xbl = tcg_temp_new_i64();
2017 get_cpu_vsrh(xbh, xB(ctx->opcode));
2018 get_cpu_vsrl(xbl, xB(ctx->opcode));
2020 tcg_gen_extract_i64(xth, xbh, 52, 11);
2021 set_cpu_vsrh(xT(ctx->opcode), xth);
2022 tcg_gen_extract_i64(xtl, xbl, 52, 11);
2023 set_cpu_vsrl(xT(ctx->opcode), xtl);
2025 tcg_temp_free_i64(xth);
2026 tcg_temp_free_i64(xtl);
2027 tcg_temp_free_i64(xbh);
2028 tcg_temp_free_i64(xbl);
/* xvxsigsp (single-precision significand extract) goes via a helper. */
2031 GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)
/*
 * xvxsigdp: Vector Extract Significand Double-Precision.
 * Per doubleword element of VSR[XB]: result = 52-bit fraction plus the
 * implicit leading bit (0x0010000000000000), with the implicit bit
 * suppressed when the exponent is 0 (zero/denormal) or 2047
 * (infinity/NaN).  Same scheme as gen_xsxsigdp, applied to both lanes.
 */
2033 static void gen_xvxsigdp(DisasContext *ctx)
2039 TCGv_i64 t0, zr, nan, exp;
2041 if (unlikely(!ctx->vsx_enabled)) {
2042 gen_exception(ctx, POWERPC_EXCP_VSXU);
2045 xth = tcg_temp_new_i64();
2046 xtl = tcg_temp_new_i64();
2047 xbh = tcg_temp_new_i64();
2048 xbl = tcg_temp_new_i64();
2049 get_cpu_vsrh(xbh, xB(ctx->opcode));
2050 get_cpu_vsrl(xbl, xB(ctx->opcode));
2051 exp = tcg_temp_new_i64();
2052 t0 = tcg_temp_new_i64();
2053 zr = tcg_const_i64(0);
2054 nan = tcg_const_i64(2047);
/* High doubleword element. */
2056 tcg_gen_extract_i64(exp, xbh, 52, 11);
2057 tcg_gen_movi_i64(t0, 0x0010000000000000);
2058 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2059 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2060 tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
2061 set_cpu_vsrh(xT(ctx->opcode), xth);
/* Low doubleword element. */
2063 tcg_gen_extract_i64(exp, xbl, 52, 11);
2064 tcg_gen_movi_i64(t0, 0x0010000000000000);
2065 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
2066 tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
2067 tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
2068 set_cpu_vsrl(xT(ctx->opcode), xtl);
2070 tcg_temp_free_i64(t0);
2071 tcg_temp_free_i64(exp);
2072 tcg_temp_free_i64(zr);
2073 tcg_temp_free_i64(nan);
2074 tcg_temp_free_i64(xth);
2075 tcg_temp_free_i64(xtl);
2076 tcg_temp_free_i64(xbh);
2077 tcg_temp_free_i64(xbl);
/* Tear down the local form-generator macros now that all users are done. */
2083 #undef GEN_XX3_RC_FORM
2084 #undef GEN_XX3FORM_DM