ppc: Don't update the NIP in floating point generated code
[qemu/kevin.git] / target-ppc / translate / vsx-impl.c
blob 9f77b06bd8f8f0fd5e53b91d29adfd14c1afbc51
/*** VSX extension ***/

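/*
 * VSR register layout (as used by cpu_vsrh/cpu_vsrl below): for VSR 0-31 the
 * high doubleword aliases the corresponding FPR and the low doubleword lives
 * in cpu_vsr[]; VSR 32-63 alias the Altivec registers (cpu_avrh/cpu_avrl).
 */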
static inline TCGv_i64 cpu_vsrh(int n)
{
    if (n < 32) {
        return cpu_fpr[n];
    } else {
        return cpu_avrh[n-32];
    }
}

static inline TCGv_i64 cpu_vsrl(int n)
{
    if (n < 32) {
        return cpu_vsr[n];
    } else {
        return cpu_avrl[n-32];
    }
}

#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xT(ctx->opcode)), EA); \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
}

VSX_LOAD_SCALAR(lxsdx, ld64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

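/* lxvd2x: load two consecutive doublewords from EA into the high and low halves of VSR[XT] */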
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64(ctx, cpu_vsrl(xT(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64(ctx, cpu_vsrh(xT(ctx->opcode)), EA);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));
    tcg_temp_free(EA);
}

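/* lxvw4x: load four words and deposit them pairwise into the two 64-bit halves of VSR[XT] */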
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 tmp;
    TCGv_i64 xth = cpu_vsrh(xT(ctx->opcode));
    TCGv_i64 xtl = cpu_vsrl(xT(ctx->opcode));
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    tmp = tcg_temp_new_i64();

    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xth, EA);
    tcg_gen_deposit_i64(xth, xth, tmp, 32, 32);

    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_ld32u_i64(ctx, xtl, EA);
    tcg_gen_deposit_i64(xtl, xtl, tmp, 32, 32);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}

#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, cpu_vsrh(xS(ctx->opcode)), EA); \
    tcg_temp_free(EA);                                        \
}

VSX_STORE_SCALAR(stxsdx, st64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_st64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_st64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);
    tcg_temp_free(EA);
}

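/* stxvw4x: store VSR[XS] as four words, the upper word of each doubleword first */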
static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv_i64 tmp;
    TCGv EA;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tmp = tcg_temp_new_i64();

    tcg_gen_shri_i64(tmp, cpu_vsrh(xS(ctx->opcode)), 32);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrh(xS(ctx->opcode)), EA);

    tcg_gen_shri_i64(tmp, cpu_vsrl(xS(ctx->opcode)), 32);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, tmp, EA);
    tcg_gen_addi_tl(EA, EA, 4);
    gen_qemu_st32_i64(ctx, cpu_vsrl(xS(ctx->opcode)), EA);

    tcg_temp_free(EA);
    tcg_temp_free_i64(tmp);
}

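/*
 * mfvsrwz/mtvsrwa/mtvsrwz: move a 32-bit word between a GPR and the high
 * doubleword of a VSR.  The availability check depends on which half of the
 * VSX register file is addressed: FPU for VSR 0-31, Altivec for VSR 32-63.
 */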
#define MV_VSRW(name, tcgop1, tcgop2, target, source)        \
static void gen_##name(DisasContext *ctx)                    \
{                                                            \
    if (xS(ctx->opcode) < 32) {                              \
        if (unlikely(!ctx->fpu_enabled)) {                   \
            gen_exception(ctx, POWERPC_EXCP_FPU);            \
            return;                                          \
        }                                                    \
    } else {                                                 \
        if (unlikely(!ctx->altivec_enabled)) {               \
            gen_exception(ctx, POWERPC_EXCP_VPU);            \
            return;                                          \
        }                                                    \
    }                                                        \
    TCGv_i64 tmp = tcg_temp_new_i64();                       \
    tcg_gen_##tcgop1(tmp, source);                           \
    tcg_gen_##tcgop2(target, tmp);                           \
    tcg_temp_free_i64(tmp);                                  \
}

MV_VSRW(mfvsrwz, ext32u_i64, trunc_i64_tl, cpu_gpr[rA(ctx->opcode)], \
        cpu_vsrh(xS(ctx->opcode)))
MV_VSRW(mtvsrwa, extu_tl_i64, ext32s_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])
MV_VSRW(mtvsrwz, extu_tl_i64, ext32u_i64, cpu_vsrh(xT(ctx->opcode)), \
        cpu_gpr[rA(ctx->opcode)])

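/* mfvsrd/mtvsrd (64-bit targets only): move a full doubleword between a GPR and the high half of a VSR */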
#if defined(TARGET_PPC64)
#define MV_VSRD(name, target, source)                        \
static void gen_##name(DisasContext *ctx)                    \
{                                                            \
    if (xS(ctx->opcode) < 32) {                              \
        if (unlikely(!ctx->fpu_enabled)) {                   \
            gen_exception(ctx, POWERPC_EXCP_FPU);            \
            return;                                          \
        }                                                    \
    } else {                                                 \
        if (unlikely(!ctx->altivec_enabled)) {               \
            gen_exception(ctx, POWERPC_EXCP_VPU);            \
            return;                                          \
        }                                                    \
    }                                                        \
    tcg_gen_mov_i64(target, source);                         \
}

MV_VSRD(mfvsrd, cpu_gpr[rA(ctx->opcode)], cpu_vsrh(xS(ctx->opcode)))
MV_VSRD(mtvsrd, cpu_vsrh(xT(ctx->opcode)), cpu_gpr[rA(ctx->opcode)])

#endif

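/*
 * xxpermdi: the DM immediate selects which doubleword of xA goes to the high
 * half of xT and which doubleword of xB goes to the low half.  Temporaries
 * are only needed when xT overlaps one of the sources.
 */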
static void gen_xxpermdi(DisasContext *ctx)
{
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        TCGv_i64 xh, xl;

        xh = tcg_temp_new_i64();
        xl = tcg_temp_new_i64();

        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(xh, cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xh, cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(xl, cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(xl, cpu_vsrl(xB(ctx->opcode)));
        }

        tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xh);
        tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xl);

        tcg_temp_free_i64(xh);
        tcg_temp_free_i64(xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xB(ctx->opcode)));
        } else {
            tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xB(ctx->opcode)));
        }
    }
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

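/*
 * Scalar sign-bit manipulation: abs clears the sign bit, nabs sets it, neg
 * flips it, and cpsgn copies it from xA; only the high doubleword of the
 * target VSR is written.
 */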
#define VSX_SCALAR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext * ctx)              \
{                                                             \
    TCGv_i64 xb, sgm;                                         \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xb = tcg_temp_new_i64();                                  \
    sgm = tcg_temp_new_i64();                                 \
    tcg_gen_mov_i64(xb, cpu_vsrh(xB(ctx->opcode)));           \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xb, xb, sgm);                          \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xb, xb, sgm);                         \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xa = tcg_temp_new_i64();                     \
        tcg_gen_mov_i64(xa, cpu_vsrh(xA(ctx->opcode)));       \
        tcg_gen_and_i64(xa, xa, sgm);                         \
        tcg_gen_andc_i64(xb, xb, sgm);                        \
        tcg_gen_or_i64(xb, xb, xa);                           \
        tcg_temp_free_i64(xa);                                \
        break;                                                \
    }                                                         \
    }                                                         \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xb);           \
    tcg_temp_free_i64(xb);                                    \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

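/* Vector form of the same sign-bit operations, applied to both doublewords of the VSR */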
#define VSX_VECTOR_MOVE(name, op, sgn_mask)                   \
static void glue(gen_, name)(DisasContext * ctx)              \
{                                                             \
    TCGv_i64 xbh, xbl, sgm;                                   \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    xbh = tcg_temp_new_i64();                                 \
    xbl = tcg_temp_new_i64();                                 \
    sgm = tcg_temp_new_i64();                                 \
    tcg_gen_mov_i64(xbh, cpu_vsrh(xB(ctx->opcode)));          \
    tcg_gen_mov_i64(xbl, cpu_vsrl(xB(ctx->opcode)));          \
    tcg_gen_movi_i64(sgm, sgn_mask);                          \
    switch (op) {                                             \
    case OP_ABS: {                                            \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        break;                                                \
    }                                                         \
    case OP_NABS: {                                           \
        tcg_gen_or_i64(xbh, xbh, sgm);                        \
        tcg_gen_or_i64(xbl, xbl, sgm);                        \
        break;                                                \
    }                                                         \
    case OP_NEG: {                                            \
        tcg_gen_xor_i64(xbh, xbh, sgm);                       \
        tcg_gen_xor_i64(xbl, xbl, sgm);                       \
        break;                                                \
    }                                                         \
    case OP_CPSGN: {                                          \
        TCGv_i64 xah = tcg_temp_new_i64();                    \
        TCGv_i64 xal = tcg_temp_new_i64();                    \
        tcg_gen_mov_i64(xah, cpu_vsrh(xA(ctx->opcode)));      \
        tcg_gen_mov_i64(xal, cpu_vsrl(xA(ctx->opcode)));      \
        tcg_gen_and_i64(xah, xah, sgm);                       \
        tcg_gen_and_i64(xal, xal, sgm);                       \
        tcg_gen_andc_i64(xbh, xbh, sgm);                      \
        tcg_gen_andc_i64(xbl, xbl, sgm);                      \
        tcg_gen_or_i64(xbh, xbh, xah);                        \
        tcg_gen_or_i64(xbl, xbl, xal);                        \
        tcg_temp_free_i64(xah);                               \
        tcg_temp_free_i64(xal);                               \
        break;                                                \
    }                                                         \
    }                                                         \
    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xbh);          \
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xbl);          \
    tcg_temp_free_i64(xbh);                                   \
    tcg_temp_free_i64(xbl);                                   \
    tcg_temp_free_i64(sgm);                                   \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

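/*
 * Most arithmetic and conversion VSX instructions are implemented in
 * out-of-line helpers that decode the raw opcode themselves; the translator
 * only checks that VSX is enabled and passes cpu_env and the opcode along.
 */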
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)         \
static void gen_##name(DisasContext * ctx)                    \
{                                                             \
    TCGv_i32 opc;                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    opc = tcg_const_i32(ctx->opcode);                         \
    gen_helper_##name(cpu_env, opc);                          \
    tcg_temp_free_i32(opc);                                   \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext * ctx)                    \
{                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    gen_helper_##name(cpu_vsrh(xT(ctx->opcode)), cpu_env,     \
                      cpu_vsrh(xB(ctx->opcode)));             \
}

GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)

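/* 128-bit bitwise logical operations, emitted as two 64-bit TCG ops */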
#define VSX_LOGICAL(name, tcg_op)                                 \
static void glue(gen_, name)(DisasContext * ctx)                  \
{                                                                 \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    tcg_op(cpu_vsrh(xT(ctx->opcode)), cpu_vsrh(xA(ctx->opcode)),  \
           cpu_vsrh(xB(ctx->opcode)));                            \
    tcg_op(cpu_vsrl(xT(ctx->opcode)), cpu_vsrl(xA(ctx->opcode)),  \
           cpu_vsrl(xB(ctx->opcode)));                            \
}

VSX_LOGICAL(xxland, tcg_gen_and_i64)
VSX_LOGICAL(xxlandc, tcg_gen_andc_i64)
VSX_LOGICAL(xxlor, tcg_gen_or_i64)
VSX_LOGICAL(xxlxor, tcg_gen_xor_i64)
VSX_LOGICAL(xxlnor, tcg_gen_nor_i64)
VSX_LOGICAL(xxleqv, tcg_gen_eqv_i64)
VSX_LOGICAL(xxlnand, tcg_gen_nand_i64)
VSX_LOGICAL(xxlorc, tcg_gen_orc_i64)

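/*
 * xxmrghw/xxmrglw: interleave the word elements of one doubleword of xA and
 * xB (the high doubleword for the "high" form, the low one otherwise).
 */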
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext * ctx)            \
{                                                           \
    TCGv_i64 a0, a1, b0, b1;                                \
    if (unlikely(!ctx->vsx_enabled)) {                      \
        gen_exception(ctx, POWERPC_EXCP_VSXU);              \
        return;                                             \
    }                                                       \
    a0 = tcg_temp_new_i64();                                \
    a1 = tcg_temp_new_i64();                                \
    b0 = tcg_temp_new_i64();                                \
    b1 = tcg_temp_new_i64();                                \
    if (high) {                                             \
        tcg_gen_mov_i64(a0, cpu_vsrh(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(a1, cpu_vsrh(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(b0, cpu_vsrh(xB(ctx->opcode)));     \
        tcg_gen_mov_i64(b1, cpu_vsrh(xB(ctx->opcode)));     \
    } else {                                                \
        tcg_gen_mov_i64(a0, cpu_vsrl(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(a1, cpu_vsrl(xA(ctx->opcode)));     \
        tcg_gen_mov_i64(b0, cpu_vsrl(xB(ctx->opcode)));     \
        tcg_gen_mov_i64(b1, cpu_vsrl(xB(ctx->opcode)));     \
    }                                                       \
    tcg_gen_shri_i64(a0, a0, 32);                           \
    tcg_gen_shri_i64(b0, b0, 32);                           \
    tcg_gen_deposit_i64(cpu_vsrh(xT(ctx->opcode)),          \
                        b0, a0, 32, 32);                    \
    tcg_gen_deposit_i64(cpu_vsrl(xT(ctx->opcode)),          \
                        b1, a1, 32, 32);                    \
    tcg_temp_free_i64(a0);                                  \
    tcg_temp_free_i64(a1);                                  \
    tcg_temp_free_i64(b0);                                  \
    tcg_temp_free_i64(b1);                                  \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

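/* xxsel: bitwise select, xT = (xA & ~xC) | (xB & xC), done one doubleword at a time */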
static void gen_xxsel(DisasContext * ctx)
{
    TCGv_i64 a, b, c;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    a = tcg_temp_new_i64();
    b = tcg_temp_new_i64();
    c = tcg_temp_new_i64();

    tcg_gen_mov_i64(a, cpu_vsrh(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrh(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrh(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), a, b);

    tcg_gen_mov_i64(a, cpu_vsrl(xA(ctx->opcode)));
    tcg_gen_mov_i64(b, cpu_vsrl(xB(ctx->opcode)));
    tcg_gen_mov_i64(c, cpu_vsrl(xC(ctx->opcode)));

    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(cpu_vsrl(xT(ctx->opcode)), a, b);

    tcg_temp_free_i64(a);
    tcg_temp_free_i64(b);
    tcg_temp_free_i64(c);
}

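/* xxspltw: replicate the word element of xB selected by UIM into all four word slots of xT */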
static void gen_xxspltw(DisasContext *ctx)
{
    TCGv_i64 b, b2;
    TCGv_i64 vsr = (UIM(ctx->opcode) & 2) ?
                   cpu_vsrl(xB(ctx->opcode)) :
                   cpu_vsrh(xB(ctx->opcode));

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    b = tcg_temp_new_i64();
    b2 = tcg_temp_new_i64();

    if (UIM(ctx->opcode) & 1) {
        tcg_gen_ext32u_i64(b, vsr);
    } else {
        tcg_gen_shri_i64(b, vsr, 32);
    }

    tcg_gen_shli_i64(b2, b, 32);
    tcg_gen_or_i64(cpu_vsrh(xT(ctx->opcode)), b, b2);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), cpu_vsrh(xT(ctx->opcode)));

    tcg_temp_free_i64(b);
    tcg_temp_free_i64(b2);
}

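/*
 * xxsldwi: shift the concatenation of xA and xB left by SHW words; each case
 * below assembles the resulting two doublewords from the relevant halves.
 */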
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrh(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_mov_i64(xth, cpu_vsrl(xA(ctx->opcode)));
        tcg_gen_shli_i64(xth, xth, 32);
        tcg_gen_mov_i64(t0, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        tcg_gen_mov_i64(xtl, cpu_vsrh(xB(ctx->opcode)));
        tcg_gen_shli_i64(xtl, xtl, 32);
        tcg_gen_mov_i64(t0, cpu_vsrl(xB(ctx->opcode)));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    tcg_gen_mov_i64(cpu_vsrh(xT(ctx->opcode)), xth);
    tcg_gen_mov_i64(cpu_vsrl(xT(ctx->opcode)), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL