/*** VSX extension ***/

static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}

#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

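/* lxvd2x: load two doublewords, from EA and EA + 8, into VSR[XT] */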
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

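/*
 * lxvwsx/lxvdsx: load a single word (resp. doubleword) and splat it
 * across all elements of VSR[XT] via the gvec dup expanders.
 */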
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}

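/*
 * Byte-swap each of the eight halfwords held in a 128-bit value
 * (passed as two i64 halves), using the usual mask/shift/or idiom.
 */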
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

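/*
 * lxvh8x/lxvb16x: load eight halfwords (resp. sixteen bytes) in
 * big-endian element order; lxvh8x byte-swaps the halfwords when
 * the context is little-endian.
 */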
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

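/*
 * DS-form scalar loads (lxsd, lxssp) target the upper half of the
 * VSR file, VSR[32 + RT], which overlays the Altivec registers;
 * hence the altivec_enabled check.
 */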
#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);                 \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

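/* stxvh8x: store eight halfwords, byte-swapping them in LE mode */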
static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth;                                                 \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    xth = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xth, rD(ctx->opcode) + 32, true);                 \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
    tcg_temp_free_i64(xth);                                       \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

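/*
 * Moves between GPRs and VSRs. Sources/targets below VSR32 overlay
 * the FPRs and need the FPU; VSR32 and up overlay the Altivec
 * registers and need the vector unit.
 */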
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}
#endif

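/*
 * Sign-bit manipulation, selected by the op argument of the macros
 * below: OP_ABS clears the sign bit(s), OP_NABS sets them, OP_NEG
 * flips them and OP_CPSGN copies them over from xA.
 */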
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    TCGv_i64 xb, sgm;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xb = tcg_temp_new_i64();                                      \
    sgm = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xb, xB(ctx->opcode), true);                       \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xb, xb, sgm);                              \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xb, xb, sgm);                             \
        break;                                                    \
    case OP_CPSGN: {                                              \
        TCGv_i64 xa = tcg_temp_new_i64();                         \
        get_cpu_vsr(xa, xA(ctx->opcode), true);                   \
        tcg_gen_and_i64(xa, xa, sgm);                             \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        tcg_gen_or_i64(xb, xb, xa);                               \
        tcg_temp_free_i64(xa);                                    \
        break;                                                    \
    }                                                             \
    }                                                             \
    set_cpu_vsr(xT(ctx->opcode), xb, true);                       \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);     \
    tcg_temp_free_i64(xb);                                        \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

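/* The same sign-bit operations, applied to both halves of a VSR */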
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
{                                                                \
    TCGv_i64 xbh, xbl, sgm;                                      \
    if (unlikely(!ctx->vsx_enabled)) {                           \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
        return;                                                  \
    }                                                            \
    xbh = tcg_temp_new_i64();                                    \
    xbl = tcg_temp_new_i64();                                    \
    sgm = tcg_temp_new_i64();                                    \
    get_cpu_vsr(xbh, xB(ctx->opcode), true);                     \
    get_cpu_vsr(xbl, xB(ctx->opcode), false);                    \
    tcg_gen_movi_i64(sgm, sgn_mask);                             \
    switch (op) {                                                \
    case OP_ABS:                                                 \
        tcg_gen_andc_i64(xbh, xbh, sgm);                         \
        tcg_gen_andc_i64(xbl, xbl, sgm);                         \
        break;                                                   \
    case OP_NABS:                                                \
        tcg_gen_or_i64(xbh, xbh, sgm);                           \
        tcg_gen_or_i64(xbl, xbl, sgm);                           \
        break;                                                   \
    case OP_NEG:                                                 \
        tcg_gen_xor_i64(xbh, xbh, sgm);                          \
        tcg_gen_xor_i64(xbl, xbl, sgm);                          \
        break;                                                   \
    case OP_CPSGN: {                                             \
        TCGv_i64 xah = tcg_temp_new_i64();                       \
        TCGv_i64 xal = tcg_temp_new_i64();                       \
        get_cpu_vsr(xah, xA(ctx->opcode), true);                 \
        get_cpu_vsr(xal, xA(ctx->opcode), false);                \
        tcg_gen_and_i64(xah, xah, sgm);                          \
        tcg_gen_and_i64(xal, xal, sgm);                          \
        tcg_gen_andc_i64(xbh, xbh, sgm);                         \
        tcg_gen_andc_i64(xbl, xbl, sgm);                         \
        tcg_gen_or_i64(xbh, xbh, xah);                           \
        tcg_gen_or_i64(xbl, xbl, xal);                           \
        tcg_temp_free_i64(xah);                                  \
        tcg_temp_free_i64(xal);                                  \
        break;                                                   \
    }                                                            \
    }                                                            \
    set_cpu_vsr(xT(ctx->opcode), xbh, true);                     \
    set_cpu_vsr(xT(ctx->opcode), xbl, false);                    \
    tcg_temp_free_i64(xbh);                                      \
    tcg_temp_free_i64(xbl);                                      \
    tcg_temp_free_i64(sgm);                                      \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)

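/*
 * XSCVQPDP: round-and-convert quad- to double-precision; a->rc
 * selects the round-to-odd variant.
 */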
static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)                 \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i64 t0;                                                              \
    TCGv_i64 t1;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    t0 = tcg_temp_new_i64();                                                  \
    t1 = tcg_temp_new_i64();                                                  \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                                   \
    gen_helper_##name(t1, cpu_env, t0);                                       \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                                   \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);                 \
    tcg_temp_free_i64(t0);                                                    \
    tcg_temp_free_i64(t1);                                                    \
}

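/* Helper-backed scalar and vector floating-point operations */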
GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)

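/*
 * XXPERM/XXPERMR reuse the Altivec VPERM/VPERMR helpers: xt is both
 * the destination and the second source vector, xb the permute control.
 */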
static bool trans_XXPERM(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERM(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMR(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERMR(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMDI(DisasContext *ctx, arg_XX3_dm *a)
{
    TCGv_i64 t0, t1;

    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    t0 = tcg_temp_new_i64();

    if (unlikely(a->xt == a->xa || a->xt == a->xb)) {
        t1 = tcg_temp_new_i64();

        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        get_cpu_vsr(t1, a->xb, (a->dm & 1) == 0);

        set_cpu_vsr(a->xt, t0, true);
        set_cpu_vsr(a->xt, t1, false);

        tcg_temp_free_i64(t1);
    } else {
        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        set_cpu_vsr(a->xt, t0, true);

        get_cpu_vsr(t0, a->xb, (a->dm & 1) == 0);
        set_cpu_vsr(a->xt, t0, false);
    }

    tcg_temp_free_i64(t0);

    return true;
}

static bool trans_XXPERMX(DisasContext *ctx, arg_8RR_XX4_uim3 *a)
{
    TCGv_ptr xt, xa, xb, xc;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);
    xc = gen_vsr_ptr(a->xc);

    gen_helper_XXPERMX(xt, xa, xb, xc, tcg_constant_tl(a->uim3));

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);
    tcg_temp_free_ptr(xc);

    return true;
}

#define XXGENPCV(NAME)                                         \
static bool trans_##NAME(DisasContext *ctx, arg_X_imm5 *a)     \
{                                                              \
    TCGv_ptr xt, vrb;                                          \
                                                               \
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);                         \
    REQUIRE_VSX(ctx);                                          \
                                                               \
    if (a->imm & ~0x3) {                                       \
        gen_invalid(ctx);                                      \
        return true;                                           \
    }                                                          \
                                                               \
    xt = gen_vsr_ptr(a->xt);                                   \
    vrb = gen_avr_ptr(a->vrb);                                 \
                                                               \
    switch (a->imm) {                                          \
    case 0b00000: /* Big-Endian expansion */                   \
        glue(gen_helper_, glue(NAME, _be_exp))(xt, vrb);       \
        break;                                                 \
    case 0b00001: /* Big-Endian compression */                 \
        glue(gen_helper_, glue(NAME, _be_comp))(xt, vrb);      \
        break;                                                 \
    case 0b00010: /* Little-Endian expansion */                \
        glue(gen_helper_, glue(NAME, _le_exp))(xt, vrb);       \
        break;                                                 \
    case 0b00011: /* Little-Endian compression */              \
        glue(gen_helper_, glue(NAME, _le_comp))(xt, vrb);      \
        break;                                                 \
    }                                                          \
                                                               \
    tcg_temp_free_ptr(xt);                                     \
    tcg_temp_free_ptr(vrb);                                    \
                                                               \
    return true;                                               \
}

XXGENPCV(XXGENPCVBM)
XXGENPCV(XXGENPCVHM)
XXGENPCV(XXGENPCVWM)
XXGENPCV(XXGENPCVDM)

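/*
 * Common code for the scalar fused multiply-add forms: the A-type
 * variants compute xA * xT + xB, the M-type variants xA * xB + xT.
 */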
static bool do_xsmadd(DisasContext *ctx, int tgt, int src1, int src2, int src3,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr t, s1, s2, s3;

    t = gen_vsr_ptr(tgt);
    s1 = gen_vsr_ptr(src1);
    s2 = gen_vsr_ptr(src2);
    s3 = gen_vsr_ptr(src3);

    gen_helper(cpu_env, t, s1, s2, s3);

    tcg_temp_free_ptr(t);
    tcg_temp_free_ptr(s1);
    tcg_temp_free_ptr(s2);
    tcg_temp_free_ptr(s3);

    return true;
}

static bool do_xsmadd_XX3(DisasContext *ctx, arg_XX3 *a, bool type_a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_VSX(ctx);

    if (type_a) {
        return do_xsmadd(ctx, a->xt, a->xa, a->xt, a->xb, gen_helper);
    }
    return do_xsmadd(ctx, a->xt, a->xa, a->xb, a->xt, gen_helper);
}

TRANS_FLAGS2(VSX, XSMADDADP, do_xsmadd_XX3, true, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMADDMDP, do_xsmadd_XX3, false, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMSUBADP, do_xsmadd_XX3, true, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSNMADDADP, do_xsmadd_XX3, true, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMADDMDP, do_xsmadd_XX3, false, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMSUBADP, do_xsmadd_XX3, true, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX, XSNMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX207, XSMADDASP, do_xsmadd_XX3, true, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMADDMSP, do_xsmadd_XX3, false, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMSUBASP, do_xsmadd_XX3, true, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSNMADDASP, do_xsmadd_XX3, true, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMADDMSP, do_xsmadd_XX3, false, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMSUBASP, do_xsmadd_XX3, true, gen_helper_XSNMSUBSP)
TRANS_FLAGS2(VSX207, XSNMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSNMSUBSP)

static bool do_xsmadd_X(DisasContext *ctx, arg_X_rc *a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr),
        void (*gen_helper_ro)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    int vrt, vra, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    vrt = a->rt + 32;
    vra = a->ra + 32;
    vrb = a->rb + 32;

    if (a->rc) {
        return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper_ro);
    }

    return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper);
}

TRANS(XSMADDQP, do_xsmadd_X, gen_helper_XSMADDQP, gen_helper_XSMADDQPO)
TRANS(XSMSUBQP, do_xsmadd_X, gen_helper_XSMSUBQP, gen_helper_XSMSUBQPO)
TRANS(XSNMADDQP, do_xsmadd_X, gen_helper_XSNMADDQP, gen_helper_XSNMADDQPO)
TRANS(XSNMSUBQP, do_xsmadd_X, gen_helper_XSNMSUBQP, gen_helper_XSNMSUBQPO)

#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, b, c;                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    if (ctx->opcode & PPC_BIT32(25)) {                                        \
        /*                                                                    \
         * AxT + B                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xT(ctx->opcode));                                     \
        c = gen_vsr_ptr(xB(ctx->opcode));                                     \
    } else {                                                                  \
        /*                                                                    \
         * AxB + T                                                            \
         */                                                                   \
        b = gen_vsr_ptr(xB(ctx->opcode));                                     \
        c = gen_vsr_ptr(xT(ctx->opcode));                                     \
    }                                                                         \
    gen_helper_##name(cpu_env, xt, xa, b, c);                                 \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(b);                                                     \
    tcg_temp_free_ptr(c);                                                     \
}

GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)

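/* xxbrh/xxbrw/xxbrd/xxbrq: byte-reverse each element of VSR[XB] */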
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#define VSX_LOGICAL(name, vece, tcg_op)                              \
static void glue(gen_, name)(DisasContext *ctx)                      \
{                                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                               \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                       \
        return;                                                      \
    }                                                                \
    tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),                   \
           vsr_full_offset(xA(ctx->opcode)),                         \
           vsr_full_offset(xB(ctx->opcode)), 16, 16);                \
}

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

#define VSX_XXMRG(name, high)                                  \
static void glue(gen_, name)(DisasContext *ctx)                \
{                                                              \
    TCGv_i64 a0, a1, b0, b1, tmp;                              \
    if (unlikely(!ctx->vsx_enabled)) {                         \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
        return;                                                \
    }                                                          \
    a0 = tcg_temp_new_i64();                                   \
    a1 = tcg_temp_new_i64();                                   \
    b0 = tcg_temp_new_i64();                                   \
    b1 = tcg_temp_new_i64();                                   \
    tmp = tcg_temp_new_i64();                                  \
    get_cpu_vsr(a0, xA(ctx->opcode), high);                    \
    get_cpu_vsr(a1, xA(ctx->opcode), high);                    \
    get_cpu_vsr(b0, xB(ctx->opcode), high);                    \
    get_cpu_vsr(b1, xB(ctx->opcode), high);                    \
    tcg_gen_shri_i64(a0, a0, 32);                              \
    tcg_gen_shri_i64(b0, b0, 32);                              \
    tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);                  \
    set_cpu_vsr(xT(ctx->opcode), tmp, true);                   \
    tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);                  \
    set_cpu_vsr(xT(ctx->opcode), tmp, false);                  \
    tcg_temp_free_i64(a0);                                     \
    tcg_temp_free_i64(a1);                                     \
    tcg_temp_free_i64(b0);                                     \
    tcg_temp_free_i64(b1);                                     \
    tcg_temp_free_i64(tmp);                                    \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

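/* XXSEL: bitwise select, XT = (XA & ~XC) | (XB & XC) */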
static bool trans_XXSEL(DisasContext *ctx, arg_XX4 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(a->xt), vsr_full_offset(a->xc),
                        vsr_full_offset(a->xb), vsr_full_offset(a->xa), 16, 16);

    return true;
}

static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2 *a)
{
    int tofs, bofs;

    REQUIRE_VSX(ctx);

    tofs = vsr_full_offset(a->xt);
    bofs = vsr_full_offset(a->xb);
    bofs += a->uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
    return true;
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
{
    if (a->xt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
    return true;
}

static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);

    return true;
}

static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
                         helper_todouble(a->si));

    return true;
}

static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
{
    TCGv_i32 imm;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    imm = tcg_constant_i32(a->si);

    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));

    return true;
}

static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
{
    static const uint64_t values[32] = {
        0,                     /* Unspecified */
        0x3FFF000000000000llu, /* QP +1.0 */
        0x4000000000000000llu, /* QP +2.0 */
        0x4000800000000000llu, /* QP +3.0 */
        0x4001000000000000llu, /* QP +4.0 */
        0x4001400000000000llu, /* QP +5.0 */
        0x4001800000000000llu, /* QP +6.0 */
        0x4001C00000000000llu, /* QP +7.0 */
        0x7FFF000000000000llu, /* QP +Inf */
        0x7FFF800000000000llu, /* QP dQNaN */
        0,                     /* Unspecified */
        0,                     /* Unspecified */
        0,                     /* Unspecified */
        0,                     /* Unspecified */
        0,                     /* Unspecified */
        0,                     /* Unspecified */
        0x8000000000000000llu, /* QP -0.0 */
        0xBFFF000000000000llu, /* QP -1.0 */
        0xC000000000000000llu, /* QP -2.0 */
        0xC000800000000000llu, /* QP -3.0 */
        0xC001000000000000llu, /* QP -4.0 */
        0xC001400000000000llu, /* QP -5.0 */
        0xC001800000000000llu, /* QP -6.0 */
        0xC001C00000000000llu, /* QP -7.0 */
        0xFFFF000000000000llu, /* QP -Inf */
    };

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (values[a->uim]) {
        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
    } else {
        gen_invalid(ctx);
    }

    return true;
}

static bool trans_XVTLSBB(DisasContext *ctx, arg_XX2_bf_xb *a)
{
    TCGv_i64 xb, t0, t1, all_true, all_false, mask, zero;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xb = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    all_true = tcg_temp_new_i64();
    all_false = tcg_temp_new_i64();
    mask = tcg_constant_i64(dup_const(MO_8, 1));
    zero = tcg_constant_i64(0);

    get_cpu_vsr(xb, a->xb, true);
    tcg_gen_and_i64(t0, mask, xb);
    get_cpu_vsr(xb, a->xb, false);
    tcg_gen_and_i64(t1, mask, xb);

    tcg_gen_or_i64(all_false, t0, t1);
    tcg_gen_and_i64(all_true, t0, t1);

    tcg_gen_setcond_i64(TCG_COND_EQ, all_false, all_false, zero);
    tcg_gen_shli_i64(all_false, all_false, 1);
    tcg_gen_setcond_i64(TCG_COND_EQ, all_true, all_true, mask);
    tcg_gen_shli_i64(all_true, all_true, 3);

    tcg_gen_or_i64(t0, all_false, all_true);
    tcg_gen_extrl_i64_i32(cpu_crf[a->bf], t0);

    tcg_temp_free_i64(xb);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(all_true);
    tcg_temp_free_i64(all_false);

    return true;
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0:
        get_cpu_vsr(xth, xA(ctx->opcode), true);
        get_cpu_vsr(xtl, xA(ctx->opcode), false);
        break;
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsr(xth, xA(ctx->opcode), true);
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsr(t0, xA(ctx->opcode), false);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsr(xtl, xA(ctx->opcode), false);
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsr(t0, xB(ctx->opcode), true);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2:
        get_cpu_vsr(xth, xA(ctx->opcode), false);
        get_cpu_vsr(xtl, xB(ctx->opcode), true);
        break;
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsr(xth, xA(ctx->opcode), false);
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsr(t0, xB(ctx->opcode), true);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsr(xtl, xB(ctx->opcode), true);
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsr(t0, xB(ctx->opcode), false);
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

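/*
 * xxextractuw/xxinsertw: word extract/insert at the byte offset in
 * UIM; offsets above 12 are handled in the helper, as on hardware.
 */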
#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv_ptr xt, xb;                                            \
    TCGv_i32 t0;                                                \
    TCGv_i64 t1;                                                \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                          \
    xb = gen_vsr_ptr(xB(ctx->opcode));                          \
    t0 = tcg_temp_new_i32();                                    \
    t1 = tcg_temp_new_i64();                                    \
    /*                                                          \
     * uimm > 15 out of bound and for                           \
     * uimm > 12 handle as per hardware in helper               \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(t1, 0);                                \
        set_cpu_vsr(xT(ctx->opcode), t1, true);                 \
        set_cpu_vsr(xT(ctx->opcode), t1, false);                \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free_ptr(xb);                                      \
    tcg_temp_free_ptr(xt);                                      \
    tcg_temp_free_i32(t0);                                      \
    tcg_temp_free_i64(t1);                                      \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xB(ctx->opcode), true);
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

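/*
 * xviexpsp/xviexpdp: insert the exponent field of each element of
 * xB into the sign and significand bits taken from xA.
 */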
static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
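
/*
 * Common body for the LXV/STXV family: load or store one 16-byte vector
 * (or two, for the paired LXVP/STXVP forms) as 8-byte accesses. The
 * doubleword order depends on le_mode, and for paired accesses in
 * little-endian mode the order of the two registers is swapped as well.
 */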
static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
                     int rt, bool store, bool paired)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;
    int rt1, rt2;

    xt = tcg_temp_new_i64();

    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (paired && ctx->le_mode) {
        rt1 = rt + 1;
        rt2 = rt;
    } else {
        rt1 = rt;
        rt2 = rt + 1;
    }

    if (store) {
        get_cpu_vsr(xt, rt1, !ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        gen_addr_add(ctx, ea, ea, 8);
        get_cpu_vsr(xt, rt1, ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, !ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        }
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, !ctx->le_mode);
        gen_addr_add(ctx, ea, ea, 8);
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, ctx->le_mode);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, !ctx->le_mode);
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, ctx->le_mode);
        }
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}
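
/*
 * Decode wrappers around do_lstxv: the D-form, prefixed PLS_D-form and
 * X-form variants below differ only in how the displacement is obtained
 * (immediate, prefixed immediate, or register).
 */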
static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
}
static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
                           bool store, bool paired)
{
    arg_D d;
    REQUIRE_VSX(ctx);

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
}
static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
}
TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)
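
/*
 * XXEVAL: imm encodes an 8-entry truth table over the corresponding bits
 * (a, b, c) of xA, xB and xC. PowerISA numbers the table entries from the
 * most significant bit, hence the 7 - ctz64(imm) inversion below. The
 * result is the disjunction of the conjunctions selected by imm.
 */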
static void gen_xxeval_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c,
                           int64_t imm)
{
    /*
     * Instead of processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_i64 conj, disj;

    conj = tcg_temp_new_i64();
    disj = tcg_const_i64(0);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_i64(conj, a);
        } else {
            tcg_gen_not_i64(conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_i64(conj, conj, b);
        } else {
            tcg_gen_andc_i64(conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_i64(conj, conj, c);
        } else {
            tcg_gen_andc_i64(conj, conj, c);
        }
        tcg_gen_or_i64(disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_i64(t, disj);

    tcg_temp_free_i64(conj);
    tcg_temp_free_i64(disj);
}
static void gen_xxeval_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                           TCGv_vec c, int64_t imm)
{
    /*
     * Instead of processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_vec disj, conj;

    disj = tcg_const_zeros_vec_matching(t);
    conj = tcg_temp_new_vec_matching(t);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_vec(conj, a);
        } else {
            tcg_gen_not_vec(vece, conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_vec(vece, conj, conj, b);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_vec(vece, conj, conj, c);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, c);
        }
        tcg_gen_or_vec(vece, disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_vec(t, disj);

    tcg_temp_free_vec(disj);
    tcg_temp_free_vec(conj);
}
static bool trans_XXEVAL(DisasContext *ctx, arg_8RR_XX4_imm *a)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_andc_vec, 0
    };
    static const GVecGen4i op = {
        .fniv = gen_xxeval_vec,
        .fno = gen_helper_XXEVAL,
        .fni8 = gen_xxeval_i64,
        .opt_opc = vecop_list,
    };
    int xt = vsr_full_offset(a->xt), xa = vsr_full_offset(a->xa),
        xb = vsr_full_offset(a->xb), xc = vsr_full_offset(a->xc);

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    /* Equivalent functions that can be implemented with a single gen_gvec */
    switch (a->imm) {
    case 0b00000000: /* false */
        set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
        break;
    case 0b00000011: /* and(B,A) */
        tcg_gen_gvec_and(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00000101: /* and(C,A) */
        tcg_gen_gvec_and(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b00001111: /* A */
        tcg_gen_gvec_mov(MO_64, xt, xa, 16, 16);
        break;
    case 0b00010001: /* and(C,B) */
        tcg_gen_gvec_and(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b00011011: /* C?B:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xb, xa, 16, 16);
        break;
    case 0b00011101: /* B?C:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xc, xa, 16, 16);
        break;
    case 0b00100111: /* C?A:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xa, xb, 16, 16);
        break;
    case 0b00110011: /* B */
        tcg_gen_gvec_mov(MO_64, xt, xb, 16, 16);
        break;
    case 0b00110101: /* A?C:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xc, xb, 16, 16);
        break;
    case 0b00111100: /* xor(B,A) */
        tcg_gen_gvec_xor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00111111: /* or(B,A) */
        tcg_gen_gvec_or(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b01000111: /* B?A:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xa, xc, 16, 16);
        break;
    case 0b01010011: /* A?B:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xb, xc, 16, 16);
        break;
    case 0b01010101: /* C */
        tcg_gen_gvec_mov(MO_64, xt, xc, 16, 16);
        break;
    case 0b01011010: /* xor(C,A) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01011111: /* or(C,A) */
        tcg_gen_gvec_or(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01100110: /* xor(C,B) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b01110111: /* or(C,B) */
        tcg_gen_gvec_or(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10001000: /* nor(C,B) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10011001: /* eqv(C,B) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10100000: /* nor(C,A) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10100101: /* eqv(C,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10101010: /* not(C) */
        tcg_gen_gvec_not(MO_64, xt, xc, 16, 16);
        break;
    case 0b11000000: /* nor(B,A) */
        tcg_gen_gvec_nor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11000011: /* eqv(B,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11001100: /* not(B) */
        tcg_gen_gvec_not(MO_64, xt, xb, 16, 16);
        break;
    case 0b11101110: /* nand(C,B) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b11110000: /* not(A) */
        tcg_gen_gvec_not(MO_64, xt, xa, 16, 16);
        break;
    case 0b11111010: /* nand(C,A) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b11111100: /* nand(B,A) */
        tcg_gen_gvec_nand(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11111111: /* true */
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), false);
        break;
    default:
        /* Fallback to compute all conjunctions/disjunctions */
        tcg_gen_gvec_4i(xt, xa, xb, xc, 16, 16, a->imm, &op);
        break;
    }

    return true;
}
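
/*
 * XXBLENDV: for each element, select from xB where the most significant
 * bit of the corresponding element of xC is set, else from xA. The
 * sign-extending shift replicates that bit across the element so that
 * bitsel can do the selection.
 */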
static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                             TCGv_vec c)
{
    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
    tcg_temp_free_vec(tmp);
}
static bool do_xxblendv(DisasContext *ctx, arg_8RR_XX4 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}
TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)
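
/*
 * Common dispatch for the ISA 3.0 scalar DP min/max variants: the "c"
 * forms (XSMAXCDP/XSMINCDP) and the "j" forms (XSMAXJDP/XSMINJDP) share
 * this shape and differ only in the helper invoked.
 */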
static bool do_xsmaxmincjdp(DisasContext *ctx, arg_XX3 *a,
                            void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}
TRANS(XSMAXCDP, do_xsmaxmincjdp, gen_helper_xsmaxcdp)
TRANS(XSMINCDP, do_xsmaxmincjdp, gen_helper_xsmincdp)
TRANS(XSMAXJDP, do_xsmaxmincjdp, gen_helper_xsmaxjdp)
TRANS(XSMINJDP, do_xsmaxmincjdp, gen_helper_xsminjdp)
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM