/*** VSX extension ***/

static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

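/*
 * For VSRs 0-31 the high doubleword is backed by the FPR array and the low
 * doubleword by the VSR array; VSRs 32-63 alias the AltiVec registers.
 * cpu_vsrh()/cpu_vsrl() return the TCG global backing each half.
 */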
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

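/*
 * gen_bswap16x8/gen_bswap32x4 byte-reverse the eight halfwords (resp. four
 * words) of a 128-bit value held in two i64 temporaries; they are used
 * below to fix up element order for little-endian guests.
 */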
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

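/*
 * lxv/stxv (DQ-form, indexed == 0) and lxvx/stxvx (X-form, indexed == 1)
 * share one body: targets 0-31 require VSX, 32-63 require AltiVec, and the
 * two doublewords are accessed in whichever order matches guest endianness.
 */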
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)            \
static void gen_##name(DisasContext *ctx)                   \
{                                                           \
    int xt;                                                 \
    TCGv EA;                                                \
    TCGv_i64 xth, xtl;                                      \
                                                            \
    if (indexed) {                                          \
        xt = xT(ctx->opcode);                               \
    } else {                                                \
        xt = DQxT(ctx->opcode);                             \
    }                                                       \
    xth = cpu_vsrh(xt);                                     \
    xtl = cpu_vsrl(xt);                                     \
                                                            \
    if (xt < 32) {                                          \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
    } else {                                                \
        if (unlikely(!ctx->altivec_enabled)) {              \
            gen_exception(ctx, POWERPC_EXCP_VPU);           \
            return;                                         \
        }                                                   \
    }                                                       \
    gen_set_access_type(ctx, ACCESS_INT);                   \
    EA = tcg_temp_new();                                    \
    if (indexed) {                                          \
        gen_addr_reg_index(ctx, EA);                        \
    } else {                                                \
        gen_addr_imm_index(ctx, EA, 0x0F);                  \
    }                                                       \
    if (ctx->le_mode) {                                     \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);   \
    } else {                                                \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);   \
        tcg_gen_addi_tl(EA, EA, 8);                         \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);   \
    }                                                       \
    tcg_temp_free(EA);                                      \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)

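/*
 * Load/store vector with length: the byte count is taken from rB at run
 * time, so these are implemented entirely in out-of-line helpers that
 * receive the effective address, the target VSR number and rB.
 */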
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                        \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA, xt;                                                  \
                                                                  \
    if (xT(ctx->opcode) < 32) {                                   \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
    } else {                                                      \
        if (unlikely(!ctx->altivec_enabled)) {                    \
            gen_exception(ctx, POWERPC_EXCP_VPU);                 \
            return;                                               \
        }                                                         \
    }                                                             \
    EA = tcg_temp_new();                                          \
    xt = tcg_const_tl(xT(ctx->opcode));                           \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    gen_addr_register(ctx, EA);                                   \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]); \
    tcg_temp_free(EA);                                            \
    tcg_temp_free(xt);                                            \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)

#define VSX_LOAD_SCALAR_DS(name, operation)                       \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));
    TCGv EA;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

#define VSX_STORE_SCALAR_DS(name, operation)                      \
static void gen_##name(DisasContext *ctx)                         \
{                                                                 \
    TCGv EA;                                                      \
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);                \
                                                                  \
    if (unlikely(!ctx->altivec_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VPU);                     \
        return;                                                   \
    }                                                             \
    gen_set_access_type(ctx, ACCESS_INT);                         \
    EA = tcg_temp_new();                                          \
    gen_addr_imm_index(ctx, EA, 0x03);                            \
    gen_qemu_##operation(ctx, xth, EA);                           \
    /* NOTE: cpu_vsrl is undefined */                             \
    tcg_temp_free(EA);                                            \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

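/*
 * Direct moves between GPRs and VSRs.  MV_VSRW transfers a word through a
 * temporary, applying the two given extension/truncation ops; MV_VSRD
 * (64-bit targets only) moves a full doubleword.
 */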
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_mov_i64(target, source);                            \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}

#endif

static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS   1
#define OP_NABS  2
#define OP_NEG   3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP  0x8000000080000000ull

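/*
 * abs/nabs/neg/cpsgn are pure sign-bit manipulation: OP_ABS clears the bits
 * under sgn_mask, OP_NABS sets them, OP_NEG flips them, and OP_CPSGN
 * replaces them with the corresponding bits of xA.
 */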
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    TCGv_i64 xb, sgm;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xb = tcg_temp_new_i64();                                      \
    sgm = tcg_temp_new_i64();                                     \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));               \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xb, xb, sgm);                              \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xb, xb, sgm);                             \
        break;                                                    \
    case OP_CPSGN: {                                              \
        TCGv_i64 xa = tcg_temp_new_i64();                         \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));           \
        tcg_gen_and_i64(xa, xa, sgm);                             \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        tcg_gen_or_i64(xb, xb, xa);                               \
        tcg_temp_free_i64(xa);                                    \
        break;                                                    \
    }                                                             \
    }                                                             \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);               \
    tcg_temp_free_i64(xb);                                        \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm;                                  \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xb));                           \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xb));                           \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        tcg_gen_and_i64(xah, cpu_vsrh(xa), sgm);                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    tcg_gen_mov_i64(cpu_vsrh(xt), xbh);                           \
    tcg_gen_mov_i64(cpu_vsrl(xt), xbl);                           \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
{                                                                \
    TCGv_i64 xbh, xbl, sgm;                                      \
    if (unlikely(!ctx->vsx_enabled)) {                           \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
        return;                                                  \
    }                                                            \
    xbh = tcg_temp_new_i64();                                    \
    xbl = tcg_temp_new_i64();                                    \
    sgm = tcg_temp_new_i64();                                    \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));             \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));             \
    tcg_gen_movi_i64(sgm, sgn_mask);                             \
    switch (op) {                                                \
    case OP_ABS:                                                 \
        tcg_gen_andc_i64(xbh, xbh, sgm);                         \
        tcg_gen_andc_i64(xbl, xbl, sgm);                         \
        break;                                                   \
    case OP_NABS:                                                \
        tcg_gen_or_i64(xbh, xbh, sgm);                           \
        tcg_gen_or_i64(xbl, xbl, sgm);                           \
        break;                                                   \
    case OP_NEG:                                                 \
        tcg_gen_xor_i64(xbh, xbh, sgm);                          \
        tcg_gen_xor_i64(xbl, xbl, sgm);                          \
        break;                                                   \
    case OP_CPSGN: {                                             \
        TCGv_i64 xah = tcg_temp_new_i64();                       \
        TCGv_i64 xal = tcg_temp_new_i64();                       \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode)));         \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode)));         \
        tcg_gen_and_i64(xah, xah, sgm);                          \
        tcg_gen_and_i64(xal, xal, sgm);                          \
        tcg_gen_andc_i64(xbh, xbh, sgm);                         \
        tcg_gen_andc_i64(xbl, xbl, sgm);                         \
        tcg_gen_or_i64(xbh, xbh, xah);                           \
        tcg_gen_or_i64(xbl, xbl, xal);                           \
        tcg_temp_free_i64(xah);                                  \
        tcg_temp_free_i64(xal);                                  \
        break;                                                   \
    }                                                            \
    }                                                            \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);             \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);             \
    tcg_temp_free_i64(xbh);                                      \
    tcg_temp_free_i64(xbl);                                      \
    tcg_temp_free_i64(sgm);                                      \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

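/*
 * Most VSX arithmetic is done out of line: the translator just passes the
 * raw opcode word to a per-instruction C helper, which re-decodes its
 * operands.  The op1/op2/inval/type arguments are not used by the macro
 * body itself; they document the opcode assignment consumed by the decode
 * tables.
 */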
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x13, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

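/* xxbrh/xxbrw/xxbrd/xxbrq: ISA 3.0 byte-reverse of each element in a VSR. */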
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap16x8(xth, xtl, xbh, xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    tcg_gen_mov_i64(xth, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_bswap32x4(xth, xtl, xbh, xbl);
}

#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext *ctx)                      \
{                                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                               \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                       \
        return;                                                      \
    }                                                                \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)),     \
           cpu_vsrh(xB(ctx->opcode)));                               \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)),     \
           cpu_vsrl(xB(ctx->opcode)));                               \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
{                                                           \
    TCGv_i64 a0, a1, b0, b1;                                \
    if (unlikely(!ctx->vsx_enabled)) {                      \
        gen_exception(ctx, POWERPC_EXCP_VSXU);              \
        return;                                             \
    }                                                       \
    a0 = tcg_temp_new_i64();                                \
    a1 = tcg_temp_new_i64();                                \
    b0 = tcg_temp_new_i64();                                \
    b1 = tcg_temp_new_i64();                                \
    if (high) {                                             \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode)));     \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode)));     \
    } else {                                                \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode)));     \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode)));     \
    }                                                       \
    tcg_gen_shri_i64(a0, a0, 32);                           \
    tcg_gen_shri_i64(b0, b0, 32);                           \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),          \
                        b0, a0, 32, 32);                    \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),          \
                        b1, a1, 32, 32);                    \
    tcg_temp_free_i64(a0);                                  \
    tcg_temp_free_i64(a1);                                  \
    tcg_temp_free_i64(b0);                                  \
    tcg_temp_free_i64(b1);                                  \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

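/* xxsel: bitwise select, xT = (xA & ~xC) | (xB & xC), one doubleword at a time. */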
static void gen_xxsel(DisasContext *ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

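/* pattern(x) replicates the low byte of x into all eight bytes of a uint64_t. */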
#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
            tcg_gen_shli_i64(xth, xth, 32);
            tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
            tcg_gen_shli_i64(xtl, xtl, 32);
            tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv xt, xb;                                                \
    TCGv_i32 t0 = tcg_temp_new_i32();                           \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = tcg_const_tl(xT(ctx->opcode));                         \
    xb = tcg_const_tl(xB(ctx->opcode));                         \
    /* uimm > 15 out of bound and for                           \
     * uimm > 12 handle as per hardware in helper               \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);         \
        tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), 0);         \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free(xb);                                          \
    tcg_temp_free(xt);                                          \
    tcg_temp_free_i32(t0);                                      \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

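/*
 * Insert/extract of IEEE exponent and significand fields (ISA 3.0).  The
 * field positions are those of the double (52-bit significand, 11-bit
 * exponent) and quad (48 bits in the high doubleword, 15-bit exponent)
 * formats.  The scalar forms move to/from GPRs and so exist only on
 * 64-bit targets.
 */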
#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_extract_i64(rt, cpu_vsrh(xB(ctx->opcode)), 52, 11);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);
    TCGv_i64 xtl = cpu_vsrl(rD(ctx->opcode) + 32);
    TCGv_i64 xbh = cpu_vsrh(rB(ctx->opcode) + 32);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_extract_i64(xth, xbh, 48, 15);
    tcg_gen_movi_i64(xtl, 0);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);
    TCGv_i64 xtl = cpu_vsrl(rD(ctx->opcode) + 32);
    TCGv_i64 xah = cpu_vsrh(rA(ctx->opcode) + 32);
    TCGv_i64 xal = cpu_vsrl(rA(ctx->opcode) + 32);
    TCGv_i64 xbh = cpu_vsrh(rB(ctx->opcode) + 32);
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    tcg_gen_mov_i64(xtl, xal);
    tcg_temp_free_i64(t0);
}

static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, cpu_vsrh(xB(ctx->opcode)), 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(rt, cpu_vsrh(xB(ctx->opcode)), 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(rt, rt, t0);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth = cpu_vsrh(rD(ctx->opcode) + 32);
    TCGv_i64 xtl = cpu_vsrl(rD(ctx->opcode) + 32);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, cpu_vsrh(rB(ctx->opcode) + 32), 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, cpu_vsrh(rB(ctx->opcode) + 32), 0x0000FFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);
    tcg_gen_mov_i64(xtl, cpu_vsrl(rB(ctx->opcode) + 32));

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}
#endif

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xah = cpu_vsrh(xA(ctx->opcode));
    TCGv_i64 xal = cpu_vsrl(xA(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xah = cpu_vsrh(xA(ctx->opcode));
    TCGv_i64 xal = cpu_vsrl(xA(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, xah, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    tcg_gen_andi_i64(xtl, xal, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbl, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xtl, xtl, t0);
    tcg_temp_free_i64(t0);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_extract_i64(xth, xbh, 52, 11);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
}

GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    TCGv_i64 xbh = cpu_vsrh(xB(ctx->opcode));
    TCGv_i64 xbl = cpu_vsrl(xB(ctx->opcode));

    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xth, xbh, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xth, xth, t0);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_andi_i64(xtl, xbl, 0x000FFFFFFFFFFFFF);
    tcg_gen_or_i64(xtl, xtl, t0);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM