/***                           VSX extension                              ***/

static inline TCGv_i64 cpu_vsrh(int n)
{
    /* Body partly reconstructed: VSRs 0-31 are assumed to overlay the FPRs
     * and VSRs 32-63 the upper halves of the Altivec registers, mirroring
     * cpu_vsrl() below. */
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n - 32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        /* assumed backing array for the lower doubleword of VSRs 0-31 */
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n - 32];
    }
}

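/*
 * Scalar VSX loads: each generated routine computes the effective address
 * with gen_addr_reg_index() and loads into the upper doubleword of VSR[XT]
 * only; the lower doubleword is left undefined, which the ISA permits for
 * these forms.
 */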
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

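/*
 * lxvdsx loads a single doubleword and splats it into both halves of the
 * target VSR, so only one memory access is generated and the second half
 * is filled with a register move.
 */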
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

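/*
 * lxvw4x loads four word elements.  For a little-endian guest the 64-bit
 * MO_LEQ load leaves the two words within each doubleword swapped, so the
 * shift+deposit pair below swaps them back: deposit(t0 >> 32, t0, 32, 32)
 * exchanges the high and low 32-bit halves of t0.
 */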
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

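/*
 * gen_bswap16x8 byte-swaps each of the eight 16-bit lanes held in a pair
 * of 64-bit values, using the usual mask/shift/or pattern:
 *   out = ((in & 0x00FF...FF) << 8) | ((in >> 8) & 0x00FF...FF)
 * i.e. every lane's low byte moves up and its high byte moves down.
 */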
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

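/*
 * lxvh8x loads eight halfword elements.  The data is fetched with two
 * big-endian 64-bit loads; for a little-endian guest the lanes are then
 * fixed up with gen_bswap16x8() (guarded by ctx->le_mode below).
 */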
static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

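/*
 * The store side mirrors the loads above: scalar stores write the upper
 * doubleword of VSR[XS] to memory, and the vector stores below undo the
 * same element swizzling that the corresponding loads apply.
 */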
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh = cpu_vsrh(xS(ctx->opcode));
    TCGv_i64 xsl = cpu_vsrl(xS(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
}

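/*
 * GPR <-> VSR moves.  The VSR number decides which facility must be
 * available: VSRs 0-31 overlay the FP registers (FPU check), while
 * VSRs 32-63 overlay the Altivec registers (VPU check).
 */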
#define MV_VSRW(name, tcgop1, tcgop2, target, source)           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    TCGv_i64 tmp = tcg_temp_new_i64();                          \
    tcg_gen_##tcgop1(tmp, source);                              \
    tcg_gen_##tcgop2(target, tmp);                              \
    tcg_temp_free_i64(tmp);                                     \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                           \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    if (xS(ctx->opcode) < 32) {                                 \
        if (unlikely(!ctx->fpu_enabled)) {                      \
            gen_exception(ctx, POWERPC_EXCP_FPU);               \
            return;                                             \
        }                                                       \
    } else {                                                    \
        if (unlikely(!ctx->altivec_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VPU);               \
            return;                                             \
        }                                                       \
    }                                                           \
    tcg_gen_mov_i64(target, source);                            \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

static void gen_mfvsrld(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], cpu_vsrl(xS(ctx->opcode)));
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), 0);
    } else {
        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)]);
    }

    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rB(ctx->opcode)]);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xT(ctx->opcode)));
}
#endif

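/*
 * xxpermdi selects one doubleword from xA and one from xB according to the
 * two DM bits.  When xT aliases xA or xB the result is staged in temporaries
 * so that the first write cannot clobber a source that is still needed.
 */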
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

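/*
 * Sign-bit masks used by the abs/nabs/neg/copy-sign emulation below:
 * SGN_MASK_DP covers the sign of the one double held in a doubleword,
 * SGN_MASK_SP covers the sign of each of the two singles packed in it.
 */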
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext * ctx)                  \
{                                                                 \
    TCGv_i64 xb, sgm;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xb = tcg_temp_new_i64();                                      \
    sgm = tcg_temp_new_i64();                                     \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));               \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS: {                                                \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        break;                                                    \
    }                                                             \
    case OP_NABS: {                                               \
        tcg_gen_or_i64(xb, xb, sgm);                              \
        break;                                                    \
    }                                                             \
    case OP_NEG: {                                                \
        tcg_gen_xor_i64(xb, xb, sgm);                             \
        break;                                                    \
    }                                                             \
    case OP_CPSGN: {                                              \
        TCGv_i64 xa = tcg_temp_new_i64();                         \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));           \
        tcg_gen_and_i64(xa, xa, sgm);                             \
        tcg_gen_andc_i64(xb, xb, sgm);                            \
        tcg_gen_or_i64(xb, xb, xa);                               \
        tcg_temp_free_i64(xa);                                    \
        break;                                                    \
    }                                                             \
    }                                                             \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);               \
    tcg_temp_free_i64(xb);                                        \
    tcg_temp_free_i64(sgm);                                       \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext * ctx)                 \
{                                                                \
    TCGv_i64 xbh, xbl, sgm;                                      \
    if (unlikely(!ctx->vsx_enabled)) {                           \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
        return;                                                  \
    }                                                            \
    xbh = tcg_temp_new_i64();                                    \
    xbl = tcg_temp_new_i64();                                    \
    sgm = tcg_temp_new_i64();                                    \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));             \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));             \
    tcg_gen_movi_i64(sgm, sgn_mask);                             \
    switch (op) {                                                \
    case OP_ABS: {                                               \
        tcg_gen_andc_i64(xbh, xbh, sgm);                         \
        tcg_gen_andc_i64(xbl, xbl, sgm);                         \
        break;                                                   \
    }                                                            \
    case OP_NABS: {                                              \
        tcg_gen_or_i64(xbh, xbh, sgm);                           \
        tcg_gen_or_i64(xbl, xbl, sgm);                           \
        break;                                                   \
    }                                                            \
    case OP_NEG: {                                               \
        tcg_gen_xor_i64(xbh, xbh, sgm);                          \
        tcg_gen_xor_i64(xbl, xbl, sgm);                          \
        break;                                                   \
    }                                                            \
    case OP_CPSGN: {                                             \
        TCGv_i64 xah = tcg_temp_new_i64();                       \
        TCGv_i64 xal = tcg_temp_new_i64();                       \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode)));         \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode)));         \
        tcg_gen_and_i64(xah, xah, sgm);                          \
        tcg_gen_and_i64(xal, xal, sgm);                          \
        tcg_gen_andc_i64(xbh, xbh, sgm);                         \
        tcg_gen_andc_i64(xbl, xbl, sgm);                         \
        tcg_gen_or_i64(xbh, xbh, xah);                           \
        tcg_gen_or_i64(xbl, xbl, xal);                           \
        tcg_temp_free_i64(xah);                                  \
        tcg_temp_free_i64(xal);                                  \
        break;                                                   \
    }                                                            \
    }                                                            \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);             \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);             \
    tcg_temp_free_i64(xbh);                                      \
    tcg_temp_free_i64(xbl);                                      \
    tcg_temp_free_i64(sgm);                                      \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

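/*
 * Most VSX arithmetic is done out of line: GEN_VSX_HELPER_2 emits a call to
 * the matching helper_<name>() with the raw opcode, and the helper decodes
 * the operand fields itself.  GEN_VSX_HELPER_XT_XB_ENV instead passes the
 * upper doublewords of XT and XB directly for simple XT <- op(XB) forms.
 */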
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext * ctx)                    \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx)                    \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

#define VSX_LOGICAL(name, tcg_op)                                    \
static void glue(gen_, name)(DisasContext * ctx)                     \
{                                                                    \
    if (unlikely(!ctx->vsx_enabled)) {                               \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                       \
        return;                                                      \
    }                                                                \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)),     \
           cpu_vsrh(xB(ctx->opcode)));                               \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)),     \
           cpu_vsrl(xB(ctx->opcode)));                               \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

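/*
 * xxmrghw/xxmrglw interleave word elements from xA and xB.  The macro makes
 * working copies of the selected doubleword (high for xxmrghw, low for
 * xxmrglw) and then pairs up the words with a shift plus deposit.
 */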
#define VSX_XXMRG(name, high)                                   \
static void glue(gen_, name)(DisasContext * ctx)                \
{                                                               \
    TCGv_i64 a0, a1, b0, b1;                                    \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    a0 = tcg_temp_new_i64();                                    \
    a1 = tcg_temp_new_i64();                                    \
    b0 = tcg_temp_new_i64();                                    \
    b1 = tcg_temp_new_i64();                                    \
    if (high) {                                                 \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode)));         \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode)));         \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode)));         \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode)));         \
    } else {                                                    \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode)));         \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode)));         \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode)));         \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode)));         \
    }                                                           \
    tcg_gen_shri_i64(a0, a0, 32);                               \
    tcg_gen_shri_i64(b0, b0, 32);                               \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),              \
                        b0, a0, 32, 32);                        \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),              \
                        b1, a1, 32, 32);                        \
    tcg_temp_free_i64(a0);                                      \
    tcg_temp_free_i64(a1);                                      \
    tcg_temp_free_i64(b0);                                      \
    tcg_temp_free_i64(b1);                                      \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

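/* xxsel computes xT = (xA & ~xC) | (xB & xC), one doubleword at a time. */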
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

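/*
 * pattern(x) replicates the low byte of x across all eight bytes of a
 * 64-bit value: (~(uint64_t)0 / 0xff) is 0x0101010101010101, so e.g.
 * pattern(0xAB) == 0xABABABABABABABAB.  gen_xxspltib uses it to splat the
 * immediate byte into both halves of the target VSR.
 */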
static void gen_xxspltib(DisasContext *ctx)
{
    unsigned char uim8 = IMM8(ctx->opcode);
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    } else {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    }
    tcg_gen_movi_i64(cpu_vsrh(xT(ctx->opcode)), pattern(uim8));
    tcg_gen_movi_i64(cpu_vsrl(xT(ctx->opcode)), pattern(uim8));
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM