/*
 * VSX instruction translation (target-ppc/translate/vsx-impl.c),
 * moved out of translate.c.
 */

/*** VSX extension ***/

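/*
 * The 64 VSX registers are not backed by a dedicated array: following the
 * Power ISA layout, VSRs 0-31 overlay the FPRs (doubleword 0) plus a
 * separate array of low doublewords, and VSRs 32-63 overlay the Altivec
 * VRs.  cpu_vsrh()/cpu_vsrl() below return the TCG global that holds the
 * high or low 64-bit half of VSR n.
 */
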
static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n - 32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n - 32];
    }
}

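/*
 * Note: the xT()/xA()/xB()/xC() accessors used throughout this file are
 * defined in translate.c; each combines the 5-bit register field of the
 * opcode with its extension bit to form a 6-bit VSR number (0-63).
 */
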
#define VSX_LOAD_SCALAR(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */ \
    tcg_temp_free(EA); \
}

VSX_LOAD_SCALAR(lxsdx, ld64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

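/*
 * The scalar loads above fill only doubleword 0 of VSR[XT]: lxsdx loads a
 * doubleword, lxsiwax/lxsiwzx load a word with sign/zero extension, and
 * lxsspx loads a single-precision value converted to double-precision
 * format (gen_qemu_ld32fs).
 */
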
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

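/*
 * lxvdsx loads a single doubleword and splats it into both halves of
 * VSR[XT], hence the mov from the high half into the low half below.
 */
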
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 tmp;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    tmp = tcg_temp_new_i64();

    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xth, EA);
    tcg_gen_deposit_i64(xth, xth, tmp, 32, 32);

    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xtl, EA);
    tcg_gen_deposit_i64(xtl, xtl, tmp, 32, 32);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}

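/*
 * In gen_lxvw4x above, the four words are loaded in memory order w0..w3
 * and packed as high doubleword = w0:w1 and low doubleword = w2:w3,
 * matching the big-endian element order of lxvw4x.
 */
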
#define VSX_STORE_SCALAR(name, operation) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA); \
}

VSX_STORE_SCALAR(stxsdx, st64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 tmp;
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tmp = tcg_temp_new_i64();

    tcg_gen_shri_i64(tmp, cpu_vsrh(xS(ctx->opcode)), 32);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);

    tcg_gen_shri_i64(tmp, cpu_vsrl(xS(ctx->opcode)), 32);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}

#define MV_VSRW(name, tcgop1, tcgop2, target, source) \
static void gen_##name(DisasContext *ctx) \
{ \
    if (xS(ctx->opcode) < 32) { \
        if (unlikely(!ctx->fpu_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_FPU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    TCGv_i64 tmp = tcg_temp_new_i64(); \
    tcg_gen_##tcgop1(tmp, source); \
    tcg_gen_##tcgop2(target, tmp); \
    tcg_temp_free_i64(tmp); \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

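/*
 * The mfvsrwz/mtvsrwa/mtvsrwz generators above move a 32-bit word between
 * a GPR and doubleword 0 of a VSR.  The availability check depends on
 * where the VSR lives: VSRs 0-31 require the FP facility, VSRs 32-63 the
 * vector facility, which is why the macro tests xS() < 32 instead of
 * vsx_enabled.
 */
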
#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source) \
static void gen_##name(DisasContext *ctx) \
{ \
    if (xS(ctx->opcode) < 32) { \
        if (unlikely(!ctx->fpu_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_FPU); \
            return; \
        } \
    } else { \
        if (unlikely(!ctx->altivec_enabled)) { \
            gen_exception(ctx, POWERPC_EXCP_VPU); \
            return; \
        } \
    } \
    tcg_gen_mov_i64(target, source); \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

#endif

static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

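/*
 * For illustration only (not used by the translator), the net effect of
 * gen_xxpermdi above on plain 64-bit halves is:
 *
 *     t_hi = (DM & 2) ? a_lo : a_hi;   // DM selects XA's doubleword
 *     t_lo = (DM & 1) ? b_lo : b_hi;   // DM selects XB's doubleword
 *
 * The temporaries are only needed when XT aliases XA or XB, so that the
 * first move does not clobber an input of the second.
 */
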
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

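/*
 * SGN_MASK_DP covers the sign bit of the one double-precision value per
 * doubleword; SGN_MASK_SP covers the sign bits of the two single-precision
 * values packed in each doubleword.
 */
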
#define VSX_SCALAR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_i64 xb, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xb = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode))); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: { \
        tcg_gen_andc_i64(xb, xb, sgm); \
        break; \
    } \
    case OP_NABS: { \
        tcg_gen_or_i64(xb, xb, sgm); \
        break; \
    } \
    case OP_NEG: { \
        tcg_gen_xor_i64(xb, xb, sgm); \
        break; \
    } \
    case OP_CPSGN: { \
        TCGv_i64 xa = tcg_temp_new_i64(); \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_and_i64(xa, xa, sgm); \
        tcg_gen_andc_i64(xb, xb, sgm); \
        tcg_gen_or_i64(xb, xb, xa); \
        tcg_temp_free_i64(xa); \
        break; \
    } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb); \
    tcg_temp_free_i64(xb); \
    tcg_temp_free_i64(sgm); \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_i64 xbh, xbl, sgm; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    xbh = tcg_temp_new_i64(); \
    xbl = tcg_temp_new_i64(); \
    sgm = tcg_temp_new_i64(); \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode))); \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode))); \
    tcg_gen_movi_i64(sgm, sgn_mask); \
    switch (op) { \
    case OP_ABS: { \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        break; \
    } \
    case OP_NABS: { \
        tcg_gen_or_i64(xbh, xbh, sgm); \
        tcg_gen_or_i64(xbl, xbl, sgm); \
        break; \
    } \
    case OP_NEG: { \
        tcg_gen_xor_i64(xbh, xbh, sgm); \
        tcg_gen_xor_i64(xbl, xbl, sgm); \
        break; \
    } \
    case OP_CPSGN: { \
        TCGv_i64 xah = tcg_temp_new_i64(); \
        TCGv_i64 xal = tcg_temp_new_i64(); \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_and_i64(xah, xah, sgm); \
        tcg_gen_and_i64(xal, xal, sgm); \
        tcg_gen_andc_i64(xbh, xbh, sgm); \
        tcg_gen_andc_i64(xbl, xbl, sgm); \
        tcg_gen_or_i64(xbh, xbh, xah); \
        tcg_gen_or_i64(xbl, xbl, xal); \
        tcg_temp_free_i64(xah); \
        tcg_temp_free_i64(xal); \
        break; \
    } \
    } \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh); \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl); \
    tcg_temp_free_i64(xbh); \
    tcg_temp_free_i64(xbl); \
    tcg_temp_free_i64(sgm); \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

#define GEN_VSX_HELPER_2(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 opc; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    /* NIP cannot be restored if the memory exception comes from a helper */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    opc = tcg_const_i32(ctx->opcode); \
    gen_helper_##name(cpu_env, opc); \
    tcg_temp_free_i32(opc); \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    /* NIP cannot be restored if the exception comes from a helper. */ \
    gen_update_nip(ctx, ctx->nip - 4); \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env, \
                      cpu_vsrh(xB(ctx->opcode))); \
}

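/*
 * GEN_VSX_HELPER_2 defers the whole operation to an out-of-line helper
 * that receives the raw opcode and decodes the register fields itself;
 * GEN_VSX_HELPER_XT_XB_ENV instead passes doubleword 0 of XT and XB
 * directly.  The op1/op2/inval/type arguments are not used in the body;
 * they are presumably consumed by the opcode-table macros (GEN_XX2FORM
 * and friends, see the #undefs at the end of this file).
 */
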
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

#define VSX_LOGICAL(name, tcg_op) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)), \
           cpu_vsrh(xB(ctx->opcode))); \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)), \
           cpu_vsrl(xB(ctx->opcode))); \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

#define VSX_XXMRG(name, high) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_i64 a0, a1, b0, b1; \
    if (unlikely(!ctx->vsx_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VSXU); \
        return; \
    } \
    a0 = tcg_temp_new_i64(); \
    a1 = tcg_temp_new_i64(); \
    b0 = tcg_temp_new_i64(); \
    b1 = tcg_temp_new_i64(); \
    if (high) { \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode))); \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode))); \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode))); \
    } else { \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode))); \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode))); \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode))); \
    } \
    tcg_gen_shri_i64(a0, a0, 32); \
    tcg_gen_shri_i64(b0, b0, 32); \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)), \
                        b0, a0, 32, 32); \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)), \
                        b1, a1, 32, 32); \
    tcg_temp_free_i64(a0); \
    tcg_temp_free_i64(a1); \
    tcg_temp_free_i64(b0); \
    tcg_temp_free_i64(b1); \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

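/*
 * In the merges above, the selected doubleword of each source holds two
 * words, and the deposits interleave them.  For xxmrghw, for example, the
 * result is { A.w0, B.w0, A.w1, B.w1 }, where A.w0/A.w1 are the two words
 * of VSR[XA]'s high doubleword and B.w0/B.w1 those of VSR[XB]'s.
 */
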
static void gen_xxsel(DisasContext *ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

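/*
 * The sequence above implements xxsel as a bitwise select: each result
 * bit comes from XB where the corresponding bit of XC is 1 and from XA
 * where it is 0, i.e. XT = (XA & ~XC) | (XB & XC).
 */
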
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

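/*
 * gen_xxspltw above replicates one of the four words of VSR[XB] into all
 * four words of VSR[XT]: bit 1 of UIM selects the doubleword and bit 0
 * the word within it.
 */
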
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

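/*
 * gen_xxsldwi above treats XA:XB as an eight-word value and selects the
 * four consecutive words starting at word SHW, i.e. the 128-bit
 * concatenation shifted left by SHW * 32 bits.  SHW == 0 copies XA,
 * SHW == 2 takes XA's low doubleword followed by XB's high doubleword,
 * and the odd cases splice across a word boundary.
 */
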
#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL