target/ppc: Optimize emulation of lvsl and lvsr instructions
[qemu/ar7.git] target/ppc/translate/vmx-impl.inc.c
1 /*
2 * translate/vmx-impl.c
4 * Altivec/VMX translation
5 */
7 /*** Altivec vector extension ***/
8 /* Altivec registers moves */
10 static inline TCGv_ptr gen_avr_ptr(int reg)
12 TCGv_ptr r = tcg_temp_new_ptr();
13 tcg_gen_addi_ptr(r, cpu_env, avr_full_offset(reg));
14 return r;
17 #define GEN_VR_LDX(name, opc2, opc3) \
18 static void glue(gen_, name)(DisasContext *ctx) \
19 { \
20 TCGv EA; \
21 TCGv_i64 avr; \
22 if (unlikely(!ctx->altivec_enabled)) { \
23 gen_exception(ctx, POWERPC_EXCP_VPU); \
24 return; \
25 } \
26 gen_set_access_type(ctx, ACCESS_INT); \
27 avr = tcg_temp_new_i64(); \
28 EA = tcg_temp_new(); \
29 gen_addr_reg_index(ctx, EA); \
30 tcg_gen_andi_tl(EA, EA, ~0xf); \
31 /* \
32 * We only need to swap high and low halves. gen_qemu_ld64_i64 \
33 * does the necessary 64-bit byteswap already. \
34 */ \
35 if (ctx->le_mode) { \
36 gen_qemu_ld64_i64(ctx, avr, EA); \
37 set_avr64(rD(ctx->opcode), avr, false); \
38 tcg_gen_addi_tl(EA, EA, 8); \
39 gen_qemu_ld64_i64(ctx, avr, EA); \
40 set_avr64(rD(ctx->opcode), avr, true); \
41 } else { \
42 gen_qemu_ld64_i64(ctx, avr, EA); \
43 set_avr64(rD(ctx->opcode), avr, true); \
44 tcg_gen_addi_tl(EA, EA, 8); \
45 gen_qemu_ld64_i64(ctx, avr, EA); \
46 set_avr64(rD(ctx->opcode), avr, false); \
47 } \
48 tcg_temp_free(EA); \
49 tcg_temp_free_i64(avr); \
52 #define GEN_VR_STX(name, opc2, opc3) \
53 static void gen_st##name(DisasContext *ctx) \
54 { \
55 TCGv EA; \
56 TCGv_i64 avr; \
57 if (unlikely(!ctx->altivec_enabled)) { \
58 gen_exception(ctx, POWERPC_EXCP_VPU); \
59 return; \
60 } \
61 gen_set_access_type(ctx, ACCESS_INT); \
62 avr = tcg_temp_new_i64(); \
63 EA = tcg_temp_new(); \
64 gen_addr_reg_index(ctx, EA); \
65 tcg_gen_andi_tl(EA, EA, ~0xf); \
66 /* \
67 * We only need to swap high and low halves. gen_qemu_st64_i64 \
68 * does the necessary 64-bit byteswap already. \
69 */ \
70 if (ctx->le_mode) { \
71 get_avr64(avr, rD(ctx->opcode), false); \
72 gen_qemu_st64_i64(ctx, avr, EA); \
73 tcg_gen_addi_tl(EA, EA, 8); \
74 get_avr64(avr, rD(ctx->opcode), true); \
75 gen_qemu_st64_i64(ctx, avr, EA); \
76 } else { \
77 get_avr64(avr, rD(ctx->opcode), true); \
78 gen_qemu_st64_i64(ctx, avr, EA); \
79 tcg_gen_addi_tl(EA, EA, 8); \
80 get_avr64(avr, rD(ctx->opcode), false); \
81 gen_qemu_st64_i64(ctx, avr, EA); \
82 } \
83 tcg_temp_free(EA); \
84 tcg_temp_free_i64(avr); \
87 #define GEN_VR_LVE(name, opc2, opc3, size) \
88 static void gen_lve##name(DisasContext *ctx) \
89 { \
90 TCGv EA; \
91 TCGv_ptr rs; \
92 if (unlikely(!ctx->altivec_enabled)) { \
93 gen_exception(ctx, POWERPC_EXCP_VPU); \
94 return; \
95 } \
96 gen_set_access_type(ctx, ACCESS_INT); \
97 EA = tcg_temp_new(); \
98 gen_addr_reg_index(ctx, EA); \
99 if (size > 1) { \
100 tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
102 rs = gen_avr_ptr(rS(ctx->opcode)); \
103 gen_helper_lve##name(cpu_env, rs, EA); \
104 tcg_temp_free(EA); \
105 tcg_temp_free_ptr(rs); \
108 #define GEN_VR_STVE(name, opc2, opc3, size) \
109 static void gen_stve##name(DisasContext *ctx) \
111 TCGv EA; \
112 TCGv_ptr rs; \
113 if (unlikely(!ctx->altivec_enabled)) { \
114 gen_exception(ctx, POWERPC_EXCP_VPU); \
115 return; \
117 gen_set_access_type(ctx, ACCESS_INT); \
118 EA = tcg_temp_new(); \
119 gen_addr_reg_index(ctx, EA); \
120 if (size > 1) { \
121 tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
123 rs = gen_avr_ptr(rS(ctx->opcode)); \
124 gen_helper_stve##name(cpu_env, rs, EA); \
125 tcg_temp_free(EA); \
126 tcg_temp_free_ptr(rs); \
129 GEN_VR_LDX(lvx, 0x07, 0x03);
130 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
131 GEN_VR_LDX(lvxl, 0x07, 0x0B);
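/*
 * A sketch of the byte mapping behind the half-swap in GEN_VR_LDX and
 * GEN_VR_STX above (assuming set_avr64(..., true) addresses the
 * most-significant doubleword of the register): with memory bytes
 * m[0..15] at the aligned EA, a big-endian guest wants m[0..7] in the
 * high doubleword and m[8..15] in the low one, while a little-endian
 * guest wants the reverse. Since gen_qemu_ld64_i64/gen_qemu_st64_i64
 * already byte-swap within each doubleword for LE guests, only the
 * choice of register half differs between the two modes.
 */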
133 GEN_VR_LVE(bx, 0x07, 0x00, 1);
134 GEN_VR_LVE(hx, 0x07, 0x01, 2);
135 GEN_VR_LVE(wx, 0x07, 0x02, 4);
137 GEN_VR_STX(svx, 0x07, 0x07);
138 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
139 GEN_VR_STX(svxl, 0x07, 0x0F);
141 GEN_VR_STVE(bx, 0x07, 0x04, 1);
142 GEN_VR_STVE(hx, 0x07, 0x05, 2);
143 GEN_VR_STVE(wx, 0x07, 0x06, 4);
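/*
 * Note (added for clarity): lvx/stvx above force 16-byte alignment by
 * clearing the low four bits of EA, whereas the element forms
 * (lvebx/lvehx/lvewx and stvebx/stvehx/stvewx) only align EA down to the
 * size of the accessed element and leave the element selection to the
 * helpers.
 */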
145 static void gen_mfvscr(DisasContext *ctx)
147 TCGv_i32 t;
148 TCGv_i64 avr;
149 if (unlikely(!ctx->altivec_enabled)) {
150 gen_exception(ctx, POWERPC_EXCP_VPU);
151 return;
153 avr = tcg_temp_new_i64();
154 tcg_gen_movi_i64(avr, 0);
155 set_avr64(rD(ctx->opcode), avr, true);
156 t = tcg_temp_new_i32();
157 gen_helper_mfvscr(t, cpu_env);
158 tcg_gen_extu_i32_i64(avr, t);
159 set_avr64(rD(ctx->opcode), avr, false);
160 tcg_temp_free_i32(t);
161 tcg_temp_free_i64(avr);
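/*
 * In the code above, mfvscr first zeroes the most-significant doubleword
 * of vD and then deposits the 32-bit VSCR value, zero-extended, into the
 * least-significant doubleword, so the upper 96 bits of vD read as zero.
 */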
164 static void gen_mtvscr(DisasContext *ctx)
166 TCGv_i32 val;
167 int bofs;
169 if (unlikely(!ctx->altivec_enabled)) {
170 gen_exception(ctx, POWERPC_EXCP_VPU);
171 return;
174 val = tcg_temp_new_i32();
175 bofs = avr_full_offset(rB(ctx->opcode));
176 #ifdef HOST_WORDS_BIGENDIAN
177 bofs += 3 * 4;
178 #endif
180 tcg_gen_ld_i32(val, cpu_env, bofs);
181 gen_helper_mtvscr(cpu_env, val);
182 tcg_temp_free_i32(val);
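/*
 * The offset arithmetic above selects the least-significant 32-bit word
 * of vB (where VSCR lives): assuming avr_full_offset() yields the
 * host-memory offset of the 16-byte register, that word sits at byte
 * offset 12 (3 * 4) on a big-endian host and at offset 0 on a
 * little-endian host.
 */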
185 #define GEN_VX_VMUL10(name, add_cin, ret_carry) \
186 static void glue(gen_, name)(DisasContext *ctx) \
188 TCGv_i64 t0; \
189 TCGv_i64 t1; \
190 TCGv_i64 t2; \
191 TCGv_i64 avr; \
192 TCGv_i64 ten, z; \
194 if (unlikely(!ctx->altivec_enabled)) { \
195 gen_exception(ctx, POWERPC_EXCP_VPU); \
196 return; \
199 t0 = tcg_temp_new_i64(); \
200 t1 = tcg_temp_new_i64(); \
201 t2 = tcg_temp_new_i64(); \
202 avr = tcg_temp_new_i64(); \
203 ten = tcg_const_i64(10); \
204 z = tcg_const_i64(0); \
206 if (add_cin) { \
207 get_avr64(avr, rA(ctx->opcode), false); \
208 tcg_gen_mulu2_i64(t0, t1, avr, ten); \
209 get_avr64(avr, rB(ctx->opcode), false); \
210 tcg_gen_andi_i64(t2, avr, 0xF); \
211 tcg_gen_add2_i64(avr, t2, t0, t1, t2, z); \
212 set_avr64(rD(ctx->opcode), avr, false); \
213 } else { \
214 get_avr64(avr, rA(ctx->opcode), false); \
215 tcg_gen_mulu2_i64(avr, t2, avr, ten); \
216 set_avr64(rD(ctx->opcode), avr, false); \
219 if (ret_carry) { \
220 get_avr64(avr, rA(ctx->opcode), true); \
221 tcg_gen_mulu2_i64(t0, t1, avr, ten); \
222 tcg_gen_add2_i64(t0, avr, t0, t1, t2, z); \
223 set_avr64(rD(ctx->opcode), avr, false); \
224 set_avr64(rD(ctx->opcode), z, true); \
225 } else { \
226 get_avr64(avr, rA(ctx->opcode), true); \
227 tcg_gen_mul_i64(t0, avr, ten); \
228 tcg_gen_add_i64(avr, t0, t2); \
229 set_avr64(rD(ctx->opcode), avr, true); \
232 tcg_temp_free_i64(t0); \
233 tcg_temp_free_i64(t1); \
234 tcg_temp_free_i64(t2); \
235 tcg_temp_free_i64(avr); \
236 tcg_temp_free_i64(ten); \
237 tcg_temp_free_i64(z); \
240 GEN_VX_VMUL10(vmul10uq, 0, 0);
241 GEN_VX_VMUL10(vmul10euq, 1, 0);
242 GEN_VX_VMUL10(vmul10cuq, 0, 1);
243 GEN_VX_VMUL10(vmul10ecuq, 1, 1);
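/*
 * Rough outline of the expansions above: the low doubleword of vA is
 * multiplied by 10 with tcg_gen_mulu2_i64 so that the upper 64 bits of
 * the product become the carry into the high doubleword. The "e"
 * (extended) forms additionally add a carry-in digit taken from the low
 * four bits of vB, and the "c" (carry) forms discard the product itself,
 * returning only the carry out of bit 127 in the low doubleword of vD
 * (with the high doubleword cleared).
 */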
245 #define GEN_VXFORM_V(name, vece, tcg_op, opc2, opc3) \
246 static void glue(gen_, name)(DisasContext *ctx) \
248 if (unlikely(!ctx->altivec_enabled)) { \
249 gen_exception(ctx, POWERPC_EXCP_VPU); \
250 return; \
253 tcg_op(vece, \
254 avr_full_offset(rD(ctx->opcode)), \
255 avr_full_offset(rA(ctx->opcode)), \
256 avr_full_offset(rB(ctx->opcode)), \
257 16, 16); \
260 /* Logical operations */
261 GEN_VXFORM_V(vand, MO_64, tcg_gen_gvec_and, 2, 16);
262 GEN_VXFORM_V(vandc, MO_64, tcg_gen_gvec_andc, 2, 17);
263 GEN_VXFORM_V(vor, MO_64, tcg_gen_gvec_or, 2, 18);
264 GEN_VXFORM_V(vxor, MO_64, tcg_gen_gvec_xor, 2, 19);
265 GEN_VXFORM_V(vnor, MO_64, tcg_gen_gvec_nor, 2, 20);
266 GEN_VXFORM_V(veqv, MO_64, tcg_gen_gvec_eqv, 2, 26);
267 GEN_VXFORM_V(vnand, MO_64, tcg_gen_gvec_nand, 2, 22);
268 GEN_VXFORM_V(vorc, MO_64, tcg_gen_gvec_orc, 2, 21);
270 #define GEN_VXFORM(name, opc2, opc3) \
271 static void glue(gen_, name)(DisasContext *ctx) \
273 TCGv_ptr ra, rb, rd; \
274 if (unlikely(!ctx->altivec_enabled)) { \
275 gen_exception(ctx, POWERPC_EXCP_VPU); \
276 return; \
278 ra = gen_avr_ptr(rA(ctx->opcode)); \
279 rb = gen_avr_ptr(rB(ctx->opcode)); \
280 rd = gen_avr_ptr(rD(ctx->opcode)); \
281 gen_helper_##name(rd, ra, rb); \
282 tcg_temp_free_ptr(ra); \
283 tcg_temp_free_ptr(rb); \
284 tcg_temp_free_ptr(rd); \
287 #define GEN_VXFORM_TRANS(name, opc2, opc3) \
288 static void glue(gen_, name)(DisasContext *ctx) \
290 if (unlikely(!ctx->altivec_enabled)) { \
291 gen_exception(ctx, POWERPC_EXCP_VPU); \
292 return; \
294 trans_##name(ctx); \
297 #define GEN_VXFORM_ENV(name, opc2, opc3) \
298 static void glue(gen_, name)(DisasContext *ctx) \
300 TCGv_ptr ra, rb, rd; \
301 if (unlikely(!ctx->altivec_enabled)) { \
302 gen_exception(ctx, POWERPC_EXCP_VPU); \
303 return; \
305 ra = gen_avr_ptr(rA(ctx->opcode)); \
306 rb = gen_avr_ptr(rB(ctx->opcode)); \
307 rd = gen_avr_ptr(rD(ctx->opcode)); \
308 gen_helper_##name(cpu_env, rd, ra, rb); \
309 tcg_temp_free_ptr(ra); \
310 tcg_temp_free_ptr(rb); \
311 tcg_temp_free_ptr(rd); \
314 #define GEN_VXFORM3(name, opc2, opc3) \
315 static void glue(gen_, name)(DisasContext *ctx) \
317 TCGv_ptr ra, rb, rc, rd; \
318 if (unlikely(!ctx->altivec_enabled)) { \
319 gen_exception(ctx, POWERPC_EXCP_VPU); \
320 return; \
322 ra = gen_avr_ptr(rA(ctx->opcode)); \
323 rb = gen_avr_ptr(rB(ctx->opcode)); \
324 rc = gen_avr_ptr(rC(ctx->opcode)); \
325 rd = gen_avr_ptr(rD(ctx->opcode)); \
326 gen_helper_##name(rd, ra, rb, rc); \
327 tcg_temp_free_ptr(ra); \
328 tcg_temp_free_ptr(rb); \
329 tcg_temp_free_ptr(rc); \
330 tcg_temp_free_ptr(rd); \
334 * Support for Altivec instruction pairs that use bit 31 (Rc) as
335 * an opcode bit. In general, these pairs come from different
336 * versions of the ISA, so we must also support a pair of flags for
337 * each instruction.
339 #define GEN_VXFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
340 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
342 if ((Rc(ctx->opcode) == 0) && \
343 ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
344 gen_##name0(ctx); \
345 } else if ((Rc(ctx->opcode) == 1) && \
346 ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
347 gen_##name1(ctx); \
348 } else { \
349 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
353 /* Like GEN_VXFORM_DUAL, but with an additional invalid-bits mask per instruction */
354 #define GEN_VXFORM_DUAL_EXT(name0, flg0, flg2_0, inval0, \
355 name1, flg1, flg2_1, inval1) \
356 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
358 if ((Rc(ctx->opcode) == 0) && \
359 ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0)) && \
360 !(ctx->opcode & inval0)) { \
361 gen_##name0(ctx); \
362 } else if ((Rc(ctx->opcode) == 1) && \
363 ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1)) && \
364 !(ctx->opcode & inval1)) { \
365 gen_##name1(ctx); \
366 } else { \
367 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
371 #define GEN_VXFORM_HETRO(name, opc2, opc3) \
372 static void glue(gen_, name)(DisasContext *ctx) \
374 TCGv_ptr rb; \
375 if (unlikely(!ctx->altivec_enabled)) { \
376 gen_exception(ctx, POWERPC_EXCP_VPU); \
377 return; \
379 rb = gen_avr_ptr(rB(ctx->opcode)); \
380 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], rb); \
381 tcg_temp_free_ptr(rb); \
384 GEN_VXFORM_V(vaddubm, MO_8, tcg_gen_gvec_add, 0, 0);
385 GEN_VXFORM_DUAL_EXT(vaddubm, PPC_ALTIVEC, PPC_NONE, 0, \
386 vmul10cuq, PPC_NONE, PPC2_ISA300, 0x0000F800)
387 GEN_VXFORM_V(vadduhm, MO_16, tcg_gen_gvec_add, 0, 1);
388 GEN_VXFORM_DUAL(vadduhm, PPC_ALTIVEC, PPC_NONE, \
389 vmul10ecuq, PPC_NONE, PPC2_ISA300)
390 GEN_VXFORM_V(vadduwm, MO_32, tcg_gen_gvec_add, 0, 2);
391 GEN_VXFORM_V(vaddudm, MO_64, tcg_gen_gvec_add, 0, 3);
392 GEN_VXFORM_V(vsububm, MO_8, tcg_gen_gvec_sub, 0, 16);
393 GEN_VXFORM_V(vsubuhm, MO_16, tcg_gen_gvec_sub, 0, 17);
394 GEN_VXFORM_V(vsubuwm, MO_32, tcg_gen_gvec_sub, 0, 18);
395 GEN_VXFORM_V(vsubudm, MO_64, tcg_gen_gvec_sub, 0, 19);
396 GEN_VXFORM_V(vmaxub, MO_8, tcg_gen_gvec_umax, 1, 0);
397 GEN_VXFORM_V(vmaxuh, MO_16, tcg_gen_gvec_umax, 1, 1);
398 GEN_VXFORM_V(vmaxuw, MO_32, tcg_gen_gvec_umax, 1, 2);
399 GEN_VXFORM_V(vmaxud, MO_64, tcg_gen_gvec_umax, 1, 3);
400 GEN_VXFORM_V(vmaxsb, MO_8, tcg_gen_gvec_smax, 1, 4);
401 GEN_VXFORM_V(vmaxsh, MO_16, tcg_gen_gvec_smax, 1, 5);
402 GEN_VXFORM_V(vmaxsw, MO_32, tcg_gen_gvec_smax, 1, 6);
403 GEN_VXFORM_V(vmaxsd, MO_64, tcg_gen_gvec_smax, 1, 7);
404 GEN_VXFORM_V(vminub, MO_8, tcg_gen_gvec_umin, 1, 8);
405 GEN_VXFORM_V(vminuh, MO_16, tcg_gen_gvec_umin, 1, 9);
406 GEN_VXFORM_V(vminuw, MO_32, tcg_gen_gvec_umin, 1, 10);
407 GEN_VXFORM_V(vminud, MO_64, tcg_gen_gvec_umin, 1, 11);
408 GEN_VXFORM_V(vminsb, MO_8, tcg_gen_gvec_smin, 1, 12);
409 GEN_VXFORM_V(vminsh, MO_16, tcg_gen_gvec_smin, 1, 13);
410 GEN_VXFORM_V(vminsw, MO_32, tcg_gen_gvec_smin, 1, 14);
411 GEN_VXFORM_V(vminsd, MO_64, tcg_gen_gvec_smin, 1, 15);
412 GEN_VXFORM(vavgub, 1, 16);
413 GEN_VXFORM(vabsdub, 1, 16);
414 GEN_VXFORM_DUAL(vavgub, PPC_ALTIVEC, PPC_NONE, \
415 vabsdub, PPC_NONE, PPC2_ISA300)
416 GEN_VXFORM(vavguh, 1, 17);
417 GEN_VXFORM(vabsduh, 1, 17);
418 GEN_VXFORM_DUAL(vavguh, PPC_ALTIVEC, PPC_NONE, \
419 vabsduh, PPC_NONE, PPC2_ISA300)
420 GEN_VXFORM(vavguw, 1, 18);
421 GEN_VXFORM(vabsduw, 1, 18);
422 GEN_VXFORM_DUAL(vavguw, PPC_ALTIVEC, PPC_NONE, \
423 vabsduw, PPC_NONE, PPC2_ISA300)
424 GEN_VXFORM(vavgsb, 1, 20);
425 GEN_VXFORM(vavgsh, 1, 21);
426 GEN_VXFORM(vavgsw, 1, 22);
427 GEN_VXFORM(vmrghb, 6, 0);
428 GEN_VXFORM(vmrghh, 6, 1);
429 GEN_VXFORM(vmrghw, 6, 2);
430 GEN_VXFORM(vmrglb, 6, 4);
431 GEN_VXFORM(vmrglh, 6, 5);
432 GEN_VXFORM(vmrglw, 6, 6);
434 static void gen_vmrgew(DisasContext *ctx)
436 TCGv_i64 tmp;
437 TCGv_i64 avr;
438 int VT, VA, VB;
439 if (unlikely(!ctx->altivec_enabled)) {
440 gen_exception(ctx, POWERPC_EXCP_VPU);
441 return;
443 VT = rD(ctx->opcode);
444 VA = rA(ctx->opcode);
445 VB = rB(ctx->opcode);
446 tmp = tcg_temp_new_i64();
447 avr = tcg_temp_new_i64();
449 get_avr64(avr, VB, true);
450 tcg_gen_shri_i64(tmp, avr, 32);
451 get_avr64(avr, VA, true);
452 tcg_gen_deposit_i64(avr, avr, tmp, 0, 32);
453 set_avr64(VT, avr, true);
455 get_avr64(avr, VB, false);
456 tcg_gen_shri_i64(tmp, avr, 32);
457 get_avr64(avr, VA, false);
458 tcg_gen_deposit_i64(avr, avr, tmp, 0, 32);
459 set_avr64(VT, avr, false);
461 tcg_temp_free_i64(tmp);
462 tcg_temp_free_i64(avr);
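/*
 * Layout example (numbering 32-bit words from the most-significant end):
 * with vA = {a0, a1, a2, a3} and vB = {b0, b1, b2, b3}, vmrgew produces
 * {a0, b0, a2, b2}. The shift extracts the even word of each vB
 * doubleword and the deposit drops it into the low 32 bits of the
 * corresponding vA doubleword.
 */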
465 static void gen_vmrgow(DisasContext *ctx)
467 TCGv_i64 t0, t1;
468 TCGv_i64 avr;
469 int VT, VA, VB;
470 if (unlikely(!ctx->altivec_enabled)) {
471 gen_exception(ctx, POWERPC_EXCP_VPU);
472 return;
474 VT = rD(ctx->opcode);
475 VA = rA(ctx->opcode);
476 VB = rB(ctx->opcode);
477 t0 = tcg_temp_new_i64();
478 t1 = tcg_temp_new_i64();
479 avr = tcg_temp_new_i64();
481 get_avr64(t0, VB, true);
482 get_avr64(t1, VA, true);
483 tcg_gen_deposit_i64(avr, t0, t1, 32, 32);
484 set_avr64(VT, avr, true);
486 get_avr64(t0, VB, false);
487 get_avr64(t1, VA, false);
488 tcg_gen_deposit_i64(avr, t0, t1, 32, 32);
489 set_avr64(VT, avr, false);
491 tcg_temp_free_i64(t0);
492 tcg_temp_free_i64(t1);
493 tcg_temp_free_i64(avr);
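/*
 * Analogously, vmrgow merges the odd words: with the same numbering as
 * above it produces {a1, b1, a3, b3}; here the vB doubleword is used as
 * the base and its bits 63:32 are overwritten with the low word of the
 * vA doubleword.
 */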
497 * lvsl VRT,RA,RB - Load Vector for Shift Left
499 * Let the EA be the sum (rA|0)+(rB). Let sh=EA[28-31].
500 * Let X be the 32-byte value 0x00 || 0x01 || 0x02 || ... || 0x1E || 0x1F.
501 * Bytes sh:sh+15 of X are placed into vD.
503 static void trans_lvsl(DisasContext *ctx)
505 int VT = rD(ctx->opcode);
506 TCGv_i64 result = tcg_temp_new_i64();
507 TCGv_i64 sh = tcg_temp_new_i64();
508 TCGv EA = tcg_temp_new();
510 /* Get sh (from the description above) by ANDing EA with 0xf. */
511 gen_addr_reg_index(ctx, EA);
512 tcg_gen_extu_tl_i64(sh, EA);
513 tcg_gen_andi_i64(sh, sh, 0xfULL);
516 * Create bytes sh:sh+7 of X (from the description above) and place them in
517 * higher doubleword of vD.
519 tcg_gen_muli_i64(sh, sh, 0x0101010101010101ULL);
520 tcg_gen_addi_i64(result, sh, 0x0001020304050607ull);
521 set_avr64(VT, result, true);
523 * Create bytes sh+8:sh+15 of X (from the description above) and place them in
524 * lower doubleword of vD.
526 tcg_gen_addi_i64(result, sh, 0x08090a0b0c0d0e0fULL);
527 set_avr64(VT, result, false);
529 tcg_temp_free_i64(result);
530 tcg_temp_free_i64(sh);
531 tcg_temp_free(EA);
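/*
 * Worked example (not part of the original code): for sh = 3 the
 * multiplication broadcasts sh into every byte, giving
 * 0x0303030303030303. Adding 0x0001020304050607 yields
 * 0x030405060708090A (bytes 3..10 of X) for the high doubleword, and
 * adding 0x08090A0B0C0D0E0F yields 0x0B0C0D0E0F101112 (bytes 11..18)
 * for the low doubleword. Because sh <= 15 and every byte of the added
 * constants is at most 0x0F, no per-byte sum exceeds 0x1E, so the
 * 64-bit additions never carry between byte lanes.
 */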
535 * lvsr VRT,RA,RB - Load Vector for Shift Right
537 * Let the EA be the sum (rA|0)+(rB). Let sh=EA[28-31].
538 * Let X be the 32-byte value 0x00 || 0x01 || 0x02 || ... || 0x1E || 0x1F.
539 * Bytes (16-sh):(31-sh) of X are placed into vD.
541 static void trans_lvsr(DisasContext *ctx)
543 int VT = rD(ctx->opcode);
544 TCGv_i64 result = tcg_temp_new_i64();
545 TCGv_i64 sh = tcg_temp_new_i64();
546 TCGv EA = tcg_temp_new();
549 /* Get sh (from the description above) by ANDing EA with 0xf. */
550 gen_addr_reg_index(ctx, EA);
551 tcg_gen_extu_tl_i64(sh, EA);
552 tcg_gen_andi_i64(sh, sh, 0xfULL);
555 * Create bytes (16-sh):(23-sh) of X (from the description above) and place them in
556 * higher doubleword of vD.
558 tcg_gen_muli_i64(sh, sh, 0x0101010101010101ULL);
559 tcg_gen_subfi_i64(result, 0x1011121314151617ULL, sh);
560 set_avr64(VT, result, true);
562 * Create bytes (24-sh):(31-sh) of X (from the description above) and place them in
563 * lower doubleword of vD.
565 tcg_gen_subfi_i64(result, 0x18191a1b1c1d1e1fULL, sh);
566 set_avr64(VT, result, false);
568 tcg_temp_free_i64(result);
569 tcg_temp_free_i64(sh);
570 tcg_temp_free(EA);
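/*
 * Worked example (not part of the original code): for sh = 3,
 * 0x1011121314151617 - 0x0303030303030303 = 0x0D0E0F1011121314
 * (bytes 13..20 of X) and 0x18191A1B1C1D1E1F - 0x0303030303030303 =
 * 0x15161718191A1B1C (bytes 21..28), i.e. bytes (16-sh)..(31-sh) as
 * required. Since every byte of the constants is at least 0x10 and
 * sh <= 15, the subtractions never borrow between byte lanes.
 */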
573 GEN_VXFORM(vmuloub, 4, 0);
574 GEN_VXFORM(vmulouh, 4, 1);
575 GEN_VXFORM(vmulouw, 4, 2);
576 GEN_VXFORM(vmuluwm, 4, 2);
577 GEN_VXFORM_DUAL(vmulouw, PPC_ALTIVEC, PPC_NONE,
578 vmuluwm, PPC_NONE, PPC2_ALTIVEC_207)
579 GEN_VXFORM(vmulosb, 4, 4);
580 GEN_VXFORM(vmulosh, 4, 5);
581 GEN_VXFORM(vmulosw, 4, 6);
582 GEN_VXFORM(vmuleub, 4, 8);
583 GEN_VXFORM(vmuleuh, 4, 9);
584 GEN_VXFORM(vmuleuw, 4, 10);
585 GEN_VXFORM(vmulesb, 4, 12);
586 GEN_VXFORM(vmulesh, 4, 13);
587 GEN_VXFORM(vmulesw, 4, 14);
588 GEN_VXFORM_V(vslb, MO_8, tcg_gen_gvec_shlv, 2, 4);
589 GEN_VXFORM_V(vslh, MO_16, tcg_gen_gvec_shlv, 2, 5);
590 GEN_VXFORM_V(vslw, MO_32, tcg_gen_gvec_shlv, 2, 6);
591 GEN_VXFORM(vrlwnm, 2, 6);
592 GEN_VXFORM_DUAL(vslw, PPC_ALTIVEC, PPC_NONE, \
593 vrlwnm, PPC_NONE, PPC2_ISA300)
594 GEN_VXFORM_V(vsld, MO_64, tcg_gen_gvec_shlv, 2, 23);
595 GEN_VXFORM_V(vsrb, MO_8, tcg_gen_gvec_shrv, 2, 8);
596 GEN_VXFORM_V(vsrh, MO_16, tcg_gen_gvec_shrv, 2, 9);
597 GEN_VXFORM_V(vsrw, MO_32, tcg_gen_gvec_shrv, 2, 10);
598 GEN_VXFORM_V(vsrd, MO_64, tcg_gen_gvec_shrv, 2, 27);
599 GEN_VXFORM_V(vsrab, MO_8, tcg_gen_gvec_sarv, 2, 12);
600 GEN_VXFORM_V(vsrah, MO_16, tcg_gen_gvec_sarv, 2, 13);
601 GEN_VXFORM_V(vsraw, MO_32, tcg_gen_gvec_sarv, 2, 14);
602 GEN_VXFORM_V(vsrad, MO_64, tcg_gen_gvec_sarv, 2, 15);
603 GEN_VXFORM(vsrv, 2, 28);
604 GEN_VXFORM(vslv, 2, 29);
605 GEN_VXFORM(vslo, 6, 16);
606 GEN_VXFORM(vsro, 6, 17);
607 GEN_VXFORM(vaddcuw, 0, 6);
608 GEN_VXFORM(vsubcuw, 0, 22);
610 #define GEN_VXFORM_SAT(NAME, VECE, NORM, SAT, OPC2, OPC3) \
611 static void glue(glue(gen_, NAME), _vec)(unsigned vece, TCGv_vec t, \
612 TCGv_vec sat, TCGv_vec a, \
613 TCGv_vec b) \
615 TCGv_vec x = tcg_temp_new_vec_matching(t); \
616 glue(glue(tcg_gen_, NORM), _vec)(VECE, x, a, b); \
617 glue(glue(tcg_gen_, SAT), _vec)(VECE, t, a, b); \
618 tcg_gen_cmp_vec(TCG_COND_NE, VECE, x, x, t); \
619 tcg_gen_or_vec(VECE, sat, sat, x); \
620 tcg_temp_free_vec(x); \
622 static void glue(gen_, NAME)(DisasContext *ctx) \
624 static const TCGOpcode vecop_list[] = { \
625 glue(glue(INDEX_op_, NORM), _vec), \
626 glue(glue(INDEX_op_, SAT), _vec), \
627 INDEX_op_cmp_vec, 0 \
628 }; \
629 static const GVecGen4 g = { \
630 .fniv = glue(glue(gen_, NAME), _vec), \
631 .fno = glue(gen_helper_, NAME), \
632 .opt_opc = vecop_list, \
633 .write_aofs = true, \
634 .vece = VECE, \
635 }; \
636 if (unlikely(!ctx->altivec_enabled)) { \
637 gen_exception(ctx, POWERPC_EXCP_VPU); \
638 return; \
640 tcg_gen_gvec_4(avr_full_offset(rD(ctx->opcode)), \
641 offsetof(CPUPPCState, vscr_sat), \
642 avr_full_offset(rA(ctx->opcode)), \
643 avr_full_offset(rB(ctx->opcode)), \
644 16, 16, &g); \
647 GEN_VXFORM_SAT(vaddubs, MO_8, add, usadd, 0, 8);
648 GEN_VXFORM_DUAL_EXT(vaddubs, PPC_ALTIVEC, PPC_NONE, 0, \
649 vmul10uq, PPC_NONE, PPC2_ISA300, 0x0000F800)
650 GEN_VXFORM_SAT(vadduhs, MO_16, add, usadd, 0, 9);
651 GEN_VXFORM_DUAL(vadduhs, PPC_ALTIVEC, PPC_NONE, \
652 vmul10euq, PPC_NONE, PPC2_ISA300)
653 GEN_VXFORM_SAT(vadduws, MO_32, add, usadd, 0, 10);
654 GEN_VXFORM_SAT(vaddsbs, MO_8, add, ssadd, 0, 12);
655 GEN_VXFORM_SAT(vaddshs, MO_16, add, ssadd, 0, 13);
656 GEN_VXFORM_SAT(vaddsws, MO_32, add, ssadd, 0, 14);
657 GEN_VXFORM_SAT(vsububs, MO_8, sub, ussub, 0, 24);
658 GEN_VXFORM_SAT(vsubuhs, MO_16, sub, ussub, 0, 25);
659 GEN_VXFORM_SAT(vsubuws, MO_32, sub, ussub, 0, 26);
660 GEN_VXFORM_SAT(vsubsbs, MO_8, sub, sssub, 0, 28);
661 GEN_VXFORM_SAT(vsubshs, MO_16, sub, sssub, 0, 29);
662 GEN_VXFORM_SAT(vsubsws, MO_32, sub, sssub, 0, 30);
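/*
 * How the GEN_VXFORM_SAT expansions above detect saturation: x holds the
 * plain modular result and t the saturating one; a lane-wise NE compare
 * therefore yields all-ones exactly in the lanes that saturated, and
 * OR-ing that mask into vscr_sat records the event (vscr_sat is
 * presumably folded into VSCR[SAT] when VSCR is read).
 */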
663 GEN_VXFORM(vadduqm, 0, 4);
664 GEN_VXFORM(vaddcuq, 0, 5);
665 GEN_VXFORM3(vaddeuqm, 30, 0);
666 GEN_VXFORM3(vaddecuq, 30, 0);
667 GEN_VXFORM_DUAL(vaddeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
668 vaddecuq, PPC_NONE, PPC2_ALTIVEC_207)
669 GEN_VXFORM(vsubuqm, 0, 20);
670 GEN_VXFORM(vsubcuq, 0, 21);
671 GEN_VXFORM3(vsubeuqm, 31, 0);
672 GEN_VXFORM3(vsubecuq, 31, 0);
673 GEN_VXFORM_DUAL(vsubeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
674 vsubecuq, PPC_NONE, PPC2_ALTIVEC_207)
675 GEN_VXFORM(vrlb, 2, 0);
676 GEN_VXFORM(vrlh, 2, 1);
677 GEN_VXFORM(vrlw, 2, 2);
678 GEN_VXFORM(vrlwmi, 2, 2);
679 GEN_VXFORM_DUAL(vrlw, PPC_ALTIVEC, PPC_NONE, \
680 vrlwmi, PPC_NONE, PPC2_ISA300)
681 GEN_VXFORM(vrld, 2, 3);
682 GEN_VXFORM(vrldmi, 2, 3);
683 GEN_VXFORM_DUAL(vrld, PPC_NONE, PPC2_ALTIVEC_207, \
684 vrldmi, PPC_NONE, PPC2_ISA300)
685 GEN_VXFORM(vsl, 2, 7);
686 GEN_VXFORM(vrldnm, 2, 7);
687 GEN_VXFORM_DUAL(vsl, PPC_ALTIVEC, PPC_NONE, \
688 vrldnm, PPC_NONE, PPC2_ISA300)
689 GEN_VXFORM(vsr, 2, 11);
690 GEN_VXFORM_ENV(vpkuhum, 7, 0);
691 GEN_VXFORM_ENV(vpkuwum, 7, 1);
692 GEN_VXFORM_ENV(vpkudum, 7, 17);
693 GEN_VXFORM_ENV(vpkuhus, 7, 2);
694 GEN_VXFORM_ENV(vpkuwus, 7, 3);
695 GEN_VXFORM_ENV(vpkudus, 7, 19);
696 GEN_VXFORM_ENV(vpkshus, 7, 4);
697 GEN_VXFORM_ENV(vpkswus, 7, 5);
698 GEN_VXFORM_ENV(vpksdus, 7, 21);
699 GEN_VXFORM_ENV(vpkshss, 7, 6);
700 GEN_VXFORM_ENV(vpkswss, 7, 7);
701 GEN_VXFORM_ENV(vpksdss, 7, 23);
702 GEN_VXFORM(vpkpx, 7, 12);
703 GEN_VXFORM_ENV(vsum4ubs, 4, 24);
704 GEN_VXFORM_ENV(vsum4sbs, 4, 28);
705 GEN_VXFORM_ENV(vsum4shs, 4, 25);
706 GEN_VXFORM_ENV(vsum2sws, 4, 26);
707 GEN_VXFORM_ENV(vsumsws, 4, 30);
708 GEN_VXFORM_ENV(vaddfp, 5, 0);
709 GEN_VXFORM_ENV(vsubfp, 5, 1);
710 GEN_VXFORM_ENV(vmaxfp, 5, 16);
711 GEN_VXFORM_ENV(vminfp, 5, 17);
712 GEN_VXFORM_HETRO(vextublx, 6, 24)
713 GEN_VXFORM_HETRO(vextuhlx, 6, 25)
714 GEN_VXFORM_HETRO(vextuwlx, 6, 26)
715 GEN_VXFORM_DUAL(vmrgow, PPC_NONE, PPC2_ALTIVEC_207,
716 vextuwlx, PPC_NONE, PPC2_ISA300)
717 GEN_VXFORM_HETRO(vextubrx, 6, 28)
718 GEN_VXFORM_HETRO(vextuhrx, 6, 29)
719 GEN_VXFORM_HETRO(vextuwrx, 6, 30)
720 GEN_VXFORM_TRANS(lvsl, 6, 31)
721 GEN_VXFORM_TRANS(lvsr, 6, 32)
722 GEN_VXFORM_DUAL(vmrgew, PPC_NONE, PPC2_ALTIVEC_207, \
723 vextuwrx, PPC_NONE, PPC2_ISA300)
725 #define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
726 static void glue(gen_, name)(DisasContext *ctx) \
728 TCGv_ptr ra, rb, rd; \
729 if (unlikely(!ctx->altivec_enabled)) { \
730 gen_exception(ctx, POWERPC_EXCP_VPU); \
731 return; \
733 ra = gen_avr_ptr(rA(ctx->opcode)); \
734 rb = gen_avr_ptr(rB(ctx->opcode)); \
735 rd = gen_avr_ptr(rD(ctx->opcode)); \
736 gen_helper_##opname(cpu_env, rd, ra, rb); \
737 tcg_temp_free_ptr(ra); \
738 tcg_temp_free_ptr(rb); \
739 tcg_temp_free_ptr(rd); \
742 #define GEN_VXRFORM(name, opc2, opc3) \
743 GEN_VXRFORM1(name, name, #name, opc2, opc3) \
744 GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))
747 * Support for Altivec instructions that use bit 31 (Rc) as an opcode
748 * bit but also use bit 21 as an actual Rc bit. In general, these pairs
749 * come from different versions of the ISA, so we must also support a
750 * pair of flags for each instruction.
752 #define GEN_VXRFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
753 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
755 if ((Rc(ctx->opcode) == 0) && \
756 ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
757 if (Rc21(ctx->opcode) == 0) { \
758 gen_##name0(ctx); \
759 } else { \
760 gen_##name0##_(ctx); \
762 } else if ((Rc(ctx->opcode) == 1) && \
763 ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
764 if (Rc21(ctx->opcode) == 0) { \
765 gen_##name1(ctx); \
766 } else { \
767 gen_##name1##_(ctx); \
769 } else { \
770 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
774 GEN_VXRFORM(vcmpequb, 3, 0)
775 GEN_VXRFORM(vcmpequh, 3, 1)
776 GEN_VXRFORM(vcmpequw, 3, 2)
777 GEN_VXRFORM(vcmpequd, 3, 3)
778 GEN_VXRFORM(vcmpnezb, 3, 4)
779 GEN_VXRFORM(vcmpnezh, 3, 5)
780 GEN_VXRFORM(vcmpnezw, 3, 6)
781 GEN_VXRFORM(vcmpgtsb, 3, 12)
782 GEN_VXRFORM(vcmpgtsh, 3, 13)
783 GEN_VXRFORM(vcmpgtsw, 3, 14)
784 GEN_VXRFORM(vcmpgtsd, 3, 15)
785 GEN_VXRFORM(vcmpgtub, 3, 8)
786 GEN_VXRFORM(vcmpgtuh, 3, 9)
787 GEN_VXRFORM(vcmpgtuw, 3, 10)
788 GEN_VXRFORM(vcmpgtud, 3, 11)
789 GEN_VXRFORM(vcmpeqfp, 3, 3)
790 GEN_VXRFORM(vcmpgefp, 3, 7)
791 GEN_VXRFORM(vcmpgtfp, 3, 11)
792 GEN_VXRFORM(vcmpbfp, 3, 15)
793 GEN_VXRFORM(vcmpneb, 3, 0)
794 GEN_VXRFORM(vcmpneh, 3, 1)
795 GEN_VXRFORM(vcmpnew, 3, 2)
797 GEN_VXRFORM_DUAL(vcmpequb, PPC_ALTIVEC, PPC_NONE, \
798 vcmpneb, PPC_NONE, PPC2_ISA300)
799 GEN_VXRFORM_DUAL(vcmpequh, PPC_ALTIVEC, PPC_NONE, \
800 vcmpneh, PPC_NONE, PPC2_ISA300)
801 GEN_VXRFORM_DUAL(vcmpequw, PPC_ALTIVEC, PPC_NONE, \
802 vcmpnew, PPC_NONE, PPC2_ISA300)
803 GEN_VXRFORM_DUAL(vcmpeqfp, PPC_ALTIVEC, PPC_NONE, \
804 vcmpequd, PPC_NONE, PPC2_ALTIVEC_207)
805 GEN_VXRFORM_DUAL(vcmpbfp, PPC_ALTIVEC, PPC_NONE, \
806 vcmpgtsd, PPC_NONE, PPC2_ALTIVEC_207)
807 GEN_VXRFORM_DUAL(vcmpgtfp, PPC_ALTIVEC, PPC_NONE, \
808 vcmpgtud, PPC_NONE, PPC2_ALTIVEC_207)
810 #define GEN_VXFORM_DUPI(name, tcg_op, opc2, opc3) \
811 static void glue(gen_, name)(DisasContext *ctx) \
813 int simm; \
814 if (unlikely(!ctx->altivec_enabled)) { \
815 gen_exception(ctx, POWERPC_EXCP_VPU); \
816 return; \
818 simm = SIMM5(ctx->opcode); \
819 tcg_op(avr_full_offset(rD(ctx->opcode)), 16, 16, simm); \
822 GEN_VXFORM_DUPI(vspltisb, tcg_gen_gvec_dup8i, 6, 12);
823 GEN_VXFORM_DUPI(vspltish, tcg_gen_gvec_dup16i, 6, 13);
824 GEN_VXFORM_DUPI(vspltisw, tcg_gen_gvec_dup32i, 6, 14);
826 #define GEN_VXFORM_NOA(name, opc2, opc3) \
827 static void glue(gen_, name)(DisasContext *ctx) \
829 TCGv_ptr rb, rd; \
830 if (unlikely(!ctx->altivec_enabled)) { \
831 gen_exception(ctx, POWERPC_EXCP_VPU); \
832 return; \
834 rb = gen_avr_ptr(rB(ctx->opcode)); \
835 rd = gen_avr_ptr(rD(ctx->opcode)); \
836 gen_helper_##name(rd, rb); \
837 tcg_temp_free_ptr(rb); \
838 tcg_temp_free_ptr(rd); \
841 #define GEN_VXFORM_NOA_ENV(name, opc2, opc3) \
842 static void glue(gen_, name)(DisasContext *ctx) \
844 TCGv_ptr rb, rd; \
846 if (unlikely(!ctx->altivec_enabled)) { \
847 gen_exception(ctx, POWERPC_EXCP_VPU); \
848 return; \
850 rb = gen_avr_ptr(rB(ctx->opcode)); \
851 rd = gen_avr_ptr(rD(ctx->opcode)); \
852 gen_helper_##name(cpu_env, rd, rb); \
853 tcg_temp_free_ptr(rb); \
854 tcg_temp_free_ptr(rd); \
857 #define GEN_VXFORM_NOA_2(name, opc2, opc3, opc4) \
858 static void glue(gen_, name)(DisasContext *ctx) \
860 TCGv_ptr rb, rd; \
861 if (unlikely(!ctx->altivec_enabled)) { \
862 gen_exception(ctx, POWERPC_EXCP_VPU); \
863 return; \
865 rb = gen_avr_ptr(rB(ctx->opcode)); \
866 rd = gen_avr_ptr(rD(ctx->opcode)); \
867 gen_helper_##name(rd, rb); \
868 tcg_temp_free_ptr(rb); \
869 tcg_temp_free_ptr(rd); \
872 #define GEN_VXFORM_NOA_3(name, opc2, opc3, opc4) \
873 static void glue(gen_, name)(DisasContext *ctx) \
875 TCGv_ptr rb; \
876 if (unlikely(!ctx->altivec_enabled)) { \
877 gen_exception(ctx, POWERPC_EXCP_VPU); \
878 return; \
880 rb = gen_avr_ptr(rB(ctx->opcode)); \
881 gen_helper_##name(cpu_gpr[rD(ctx->opcode)], rb); \
882 tcg_temp_free_ptr(rb); \
884 GEN_VXFORM_NOA(vupkhsb, 7, 8);
885 GEN_VXFORM_NOA(vupkhsh, 7, 9);
886 GEN_VXFORM_NOA(vupkhsw, 7, 25);
887 GEN_VXFORM_NOA(vupklsb, 7, 10);
888 GEN_VXFORM_NOA(vupklsh, 7, 11);
889 GEN_VXFORM_NOA(vupklsw, 7, 27);
890 GEN_VXFORM_NOA(vupkhpx, 7, 13);
891 GEN_VXFORM_NOA(vupklpx, 7, 15);
892 GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
893 GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
894 GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
895 GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
896 GEN_VXFORM_NOA_ENV(vrfim, 5, 11);
897 GEN_VXFORM_NOA_ENV(vrfin, 5, 8);
898 GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
899 GEN_VXFORM_NOA_ENV(vrfiz, 5, 9);
900 GEN_VXFORM_NOA(vprtybw, 1, 24);
901 GEN_VXFORM_NOA(vprtybd, 1, 24);
902 GEN_VXFORM_NOA(vprtybq, 1, 24);
904 static void gen_vsplt(DisasContext *ctx, int vece)
906 int uimm, dofs, bofs;
908 if (unlikely(!ctx->altivec_enabled)) {
909 gen_exception(ctx, POWERPC_EXCP_VPU);
910 return;
913 uimm = UIMM5(ctx->opcode);
914 bofs = avr_full_offset(rB(ctx->opcode));
915 dofs = avr_full_offset(rD(ctx->opcode));
917 /* Experimental testing shows that hardware masks the immediate. */
918 bofs += (uimm << vece) & 15;
919 #ifndef HOST_WORDS_BIGENDIAN
920 bofs ^= 15;
921 bofs &= ~((1 << vece) - 1);
922 #endif
924 tcg_gen_gvec_dup_mem(vece, dofs, bofs, 16, 16);
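/*
 * Offset arithmetic above, assuming the 16 bytes of the register are
 * stored in host byte order: uimm << vece is the big-endian byte index
 * of the selected element, masked with 15 to mimic the hardware
 * behaviour mentioned in the comment. On a little-endian host the byte
 * order of the array is reversed, so the index is mirrored with ^= 15
 * and then re-aligned down to the element size.
 */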
927 #define GEN_VXFORM_VSPLT(name, vece, opc2, opc3) \
928 static void glue(gen_, name)(DisasContext *ctx) { gen_vsplt(ctx, vece); }
930 #define GEN_VXFORM_UIMM_ENV(name, opc2, opc3) \
931 static void glue(gen_, name)(DisasContext *ctx) \
933 TCGv_ptr rb, rd; \
934 TCGv_i32 uimm; \
936 if (unlikely(!ctx->altivec_enabled)) { \
937 gen_exception(ctx, POWERPC_EXCP_VPU); \
938 return; \
940 uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
941 rb = gen_avr_ptr(rB(ctx->opcode)); \
942 rd = gen_avr_ptr(rD(ctx->opcode)); \
943 gen_helper_##name(cpu_env, rd, rb, uimm); \
944 tcg_temp_free_i32(uimm); \
945 tcg_temp_free_ptr(rb); \
946 tcg_temp_free_ptr(rd); \
949 #define GEN_VXFORM_UIMM_SPLAT(name, opc2, opc3, splat_max) \
950 static void glue(gen_, name)(DisasContext *ctx) \
952 TCGv_ptr rb, rd; \
953 uint8_t uimm = UIMM4(ctx->opcode); \
954 TCGv_i32 t0; \
955 if (unlikely(!ctx->altivec_enabled)) { \
956 gen_exception(ctx, POWERPC_EXCP_VPU); \
957 return; \
959 if (uimm > splat_max) { \
960 uimm = 0; \
962 t0 = tcg_temp_new_i32(); \
963 tcg_gen_movi_i32(t0, uimm); \
964 rb = gen_avr_ptr(rB(ctx->opcode)); \
965 rd = gen_avr_ptr(rD(ctx->opcode)); \
966 gen_helper_##name(rd, rb, t0); \
967 tcg_temp_free_i32(t0); \
968 tcg_temp_free_ptr(rb); \
969 tcg_temp_free_ptr(rd); \
972 GEN_VXFORM_VSPLT(vspltb, MO_8, 6, 8);
973 GEN_VXFORM_VSPLT(vsplth, MO_16, 6, 9);
974 GEN_VXFORM_VSPLT(vspltw, MO_32, 6, 10);
975 GEN_VXFORM_UIMM_SPLAT(vextractub, 6, 8, 15);
976 GEN_VXFORM_UIMM_SPLAT(vextractuh, 6, 9, 14);
977 GEN_VXFORM_UIMM_SPLAT(vextractuw, 6, 10, 12);
978 GEN_VXFORM_UIMM_SPLAT(vextractd, 6, 11, 8);
979 GEN_VXFORM_UIMM_SPLAT(vinsertb, 6, 12, 15);
980 GEN_VXFORM_UIMM_SPLAT(vinserth, 6, 13, 14);
981 GEN_VXFORM_UIMM_SPLAT(vinsertw, 6, 14, 12);
982 GEN_VXFORM_UIMM_SPLAT(vinsertd, 6, 15, 8);
983 GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
984 GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
985 GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
986 GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
987 GEN_VXFORM_DUAL(vspltb, PPC_ALTIVEC, PPC_NONE,
988 vextractub, PPC_NONE, PPC2_ISA300);
989 GEN_VXFORM_DUAL(vsplth, PPC_ALTIVEC, PPC_NONE,
990 vextractuh, PPC_NONE, PPC2_ISA300);
991 GEN_VXFORM_DUAL(vspltw, PPC_ALTIVEC, PPC_NONE,
992 vextractuw, PPC_NONE, PPC2_ISA300);
993 GEN_VXFORM_DUAL(vspltisb, PPC_ALTIVEC, PPC_NONE,
994 vinsertb, PPC_NONE, PPC2_ISA300);
995 GEN_VXFORM_DUAL(vspltish, PPC_ALTIVEC, PPC_NONE,
996 vinserth, PPC_NONE, PPC2_ISA300);
997 GEN_VXFORM_DUAL(vspltisw, PPC_ALTIVEC, PPC_NONE,
998 vinsertw, PPC_NONE, PPC2_ISA300);
1000 static void gen_vsldoi(DisasContext *ctx)
1002 TCGv_ptr ra, rb, rd;
1003 TCGv_i32 sh;
1004 if (unlikely(!ctx->altivec_enabled)) {
1005 gen_exception(ctx, POWERPC_EXCP_VPU);
1006 return;
1008 ra = gen_avr_ptr(rA(ctx->opcode));
1009 rb = gen_avr_ptr(rB(ctx->opcode));
1010 rd = gen_avr_ptr(rD(ctx->opcode));
1011 sh = tcg_const_i32(VSH(ctx->opcode));
1012 gen_helper_vsldoi(rd, ra, rb, sh);
1013 tcg_temp_free_ptr(ra);
1014 tcg_temp_free_ptr(rb);
1015 tcg_temp_free_ptr(rd);
1016 tcg_temp_free_i32(sh);
1019 #define GEN_VAFORM_PAIRED(name0, name1, opc2) \
1020 static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
1022 TCGv_ptr ra, rb, rc, rd; \
1023 if (unlikely(!ctx->altivec_enabled)) { \
1024 gen_exception(ctx, POWERPC_EXCP_VPU); \
1025 return; \
1027 ra = gen_avr_ptr(rA(ctx->opcode)); \
1028 rb = gen_avr_ptr(rB(ctx->opcode)); \
1029 rc = gen_avr_ptr(rC(ctx->opcode)); \
1030 rd = gen_avr_ptr(rD(ctx->opcode)); \
1031 if (Rc(ctx->opcode)) { \
1032 gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
1033 } else { \
1034 gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
1036 tcg_temp_free_ptr(ra); \
1037 tcg_temp_free_ptr(rb); \
1038 tcg_temp_free_ptr(rc); \
1039 tcg_temp_free_ptr(rd); \
1042 GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)
1044 static void gen_vmladduhm(DisasContext *ctx)
1046 TCGv_ptr ra, rb, rc, rd;
1047 if (unlikely(!ctx->altivec_enabled)) {
1048 gen_exception(ctx, POWERPC_EXCP_VPU);
1049 return;
1051 ra = gen_avr_ptr(rA(ctx->opcode));
1052 rb = gen_avr_ptr(rB(ctx->opcode));
1053 rc = gen_avr_ptr(rC(ctx->opcode));
1054 rd = gen_avr_ptr(rD(ctx->opcode));
1055 gen_helper_vmladduhm(rd, ra, rb, rc);
1056 tcg_temp_free_ptr(ra);
1057 tcg_temp_free_ptr(rb);
1058 tcg_temp_free_ptr(rc);
1059 tcg_temp_free_ptr(rd);
1062 static void gen_vpermr(DisasContext *ctx)
1064 TCGv_ptr ra, rb, rc, rd;
1065 if (unlikely(!ctx->altivec_enabled)) {
1066 gen_exception(ctx, POWERPC_EXCP_VPU);
1067 return;
1069 ra = gen_avr_ptr(rA(ctx->opcode));
1070 rb = gen_avr_ptr(rB(ctx->opcode));
1071 rc = gen_avr_ptr(rC(ctx->opcode));
1072 rd = gen_avr_ptr(rD(ctx->opcode));
1073 gen_helper_vpermr(cpu_env, rd, ra, rb, rc);
1074 tcg_temp_free_ptr(ra);
1075 tcg_temp_free_ptr(rb);
1076 tcg_temp_free_ptr(rc);
1077 tcg_temp_free_ptr(rd);
1080 GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
1081 GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
1082 GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
1083 GEN_VAFORM_PAIRED(vsel, vperm, 21)
1084 GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)
1086 GEN_VXFORM_NOA(vclzb, 1, 28)
1087 GEN_VXFORM_NOA(vclzh, 1, 29)
1088 GEN_VXFORM_NOA(vclzw, 1, 30)
1089 GEN_VXFORM_NOA(vclzd, 1, 31)
1090 GEN_VXFORM_NOA_2(vnegw, 1, 24, 6)
1091 GEN_VXFORM_NOA_2(vnegd, 1, 24, 7)
1092 GEN_VXFORM_NOA_2(vextsb2w, 1, 24, 16)
1093 GEN_VXFORM_NOA_2(vextsh2w, 1, 24, 17)
1094 GEN_VXFORM_NOA_2(vextsb2d, 1, 24, 24)
1095 GEN_VXFORM_NOA_2(vextsh2d, 1, 24, 25)
1096 GEN_VXFORM_NOA_2(vextsw2d, 1, 24, 26)
1097 GEN_VXFORM_NOA_2(vctzb, 1, 24, 28)
1098 GEN_VXFORM_NOA_2(vctzh, 1, 24, 29)
1099 GEN_VXFORM_NOA_2(vctzw, 1, 24, 30)
1100 GEN_VXFORM_NOA_2(vctzd, 1, 24, 31)
1101 GEN_VXFORM_NOA_3(vclzlsbb, 1, 24, 0)
1102 GEN_VXFORM_NOA_3(vctzlsbb, 1, 24, 1)
1103 GEN_VXFORM_NOA(vpopcntb, 1, 28)
1104 GEN_VXFORM_NOA(vpopcnth, 1, 29)
1105 GEN_VXFORM_NOA(vpopcntw, 1, 30)
1106 GEN_VXFORM_NOA(vpopcntd, 1, 31)
1107 GEN_VXFORM_DUAL(vclzb, PPC_NONE, PPC2_ALTIVEC_207, \
1108 vpopcntb, PPC_NONE, PPC2_ALTIVEC_207)
1109 GEN_VXFORM_DUAL(vclzh, PPC_NONE, PPC2_ALTIVEC_207, \
1110 vpopcnth, PPC_NONE, PPC2_ALTIVEC_207)
1111 GEN_VXFORM_DUAL(vclzw, PPC_NONE, PPC2_ALTIVEC_207, \
1112 vpopcntw, PPC_NONE, PPC2_ALTIVEC_207)
1113 GEN_VXFORM_DUAL(vclzd, PPC_NONE, PPC2_ALTIVEC_207, \
1114 vpopcntd, PPC_NONE, PPC2_ALTIVEC_207)
1115 GEN_VXFORM(vbpermd, 6, 23);
1116 GEN_VXFORM(vbpermq, 6, 21);
1117 GEN_VXFORM_NOA(vgbbd, 6, 20);
1118 GEN_VXFORM(vpmsumb, 4, 16)
1119 GEN_VXFORM(vpmsumh, 4, 17)
1120 GEN_VXFORM(vpmsumw, 4, 18)
1121 GEN_VXFORM(vpmsumd, 4, 19)
1123 #define GEN_BCD(op) \
1124 static void gen_##op(DisasContext *ctx) \
1126 TCGv_ptr ra, rb, rd; \
1127 TCGv_i32 ps; \
1129 if (unlikely(!ctx->altivec_enabled)) { \
1130 gen_exception(ctx, POWERPC_EXCP_VPU); \
1131 return; \
1134 ra = gen_avr_ptr(rA(ctx->opcode)); \
1135 rb = gen_avr_ptr(rB(ctx->opcode)); \
1136 rd = gen_avr_ptr(rD(ctx->opcode)); \
1138 ps = tcg_const_i32((ctx->opcode & 0x200) != 0); \
1140 gen_helper_##op(cpu_crf[6], rd, ra, rb, ps); \
1142 tcg_temp_free_ptr(ra); \
1143 tcg_temp_free_ptr(rb); \
1144 tcg_temp_free_ptr(rd); \
1145 tcg_temp_free_i32(ps); \
1148 #define GEN_BCD2(op) \
1149 static void gen_##op(DisasContext *ctx) \
1151 TCGv_ptr rd, rb; \
1152 TCGv_i32 ps; \
1154 if (unlikely(!ctx->altivec_enabled)) { \
1155 gen_exception(ctx, POWERPC_EXCP_VPU); \
1156 return; \
1159 rb = gen_avr_ptr(rB(ctx->opcode)); \
1160 rd = gen_avr_ptr(rD(ctx->opcode)); \
1162 ps = tcg_const_i32((ctx->opcode & 0x200) != 0); \
1164 gen_helper_##op(cpu_crf[6], rd, rb, ps); \
1166 tcg_temp_free_ptr(rb); \
1167 tcg_temp_free_ptr(rd); \
1168 tcg_temp_free_i32(ps); \
1171 GEN_BCD(bcdadd)
1172 GEN_BCD(bcdsub)
1173 GEN_BCD2(bcdcfn)
1174 GEN_BCD2(bcdctn)
1175 GEN_BCD2(bcdcfz)
1176 GEN_BCD2(bcdctz)
1177 GEN_BCD2(bcdcfsq)
1178 GEN_BCD2(bcdctsq)
1179 GEN_BCD2(bcdsetsgn)
1180 GEN_BCD(bcdcpsgn);
1181 GEN_BCD(bcds);
1182 GEN_BCD(bcdus);
1183 GEN_BCD(bcdsr);
1184 GEN_BCD(bcdtrunc);
1185 GEN_BCD(bcdutrunc);
1187 static void gen_xpnd04_1(DisasContext *ctx)
1189 switch (opc4(ctx->opcode)) {
1190 case 0:
1191 gen_bcdctsq(ctx);
1192 break;
1193 case 2:
1194 gen_bcdcfsq(ctx);
1195 break;
1196 case 4:
1197 gen_bcdctz(ctx);
1198 break;
1199 case 5:
1200 gen_bcdctn(ctx);
1201 break;
1202 case 6:
1203 gen_bcdcfz(ctx);
1204 break;
1205 case 7:
1206 gen_bcdcfn(ctx);
1207 break;
1208 case 31:
1209 gen_bcdsetsgn(ctx);
1210 break;
1211 default:
1212 gen_invalid(ctx);
1213 break;
1217 static void gen_xpnd04_2(DisasContext *ctx)
1219 switch (opc4(ctx->opcode)) {
1220 case 0:
1221 gen_bcdctsq(ctx);
1222 break;
1223 case 2:
1224 gen_bcdcfsq(ctx);
1225 break;
1226 case 4:
1227 gen_bcdctz(ctx);
1228 break;
1229 case 6:
1230 gen_bcdcfz(ctx);
1231 break;
1232 case 7:
1233 gen_bcdcfn(ctx);
1234 break;
1235 case 31:
1236 gen_bcdsetsgn(ctx);
1237 break;
1238 default:
1239 gen_invalid(ctx);
1240 break;
1245 GEN_VXFORM_DUAL(vsubcuw, PPC_ALTIVEC, PPC_NONE, \
1246 xpnd04_1, PPC_NONE, PPC2_ISA300)
1247 GEN_VXFORM_DUAL(vsubsws, PPC_ALTIVEC, PPC_NONE, \
1248 xpnd04_2, PPC_NONE, PPC2_ISA300)
1250 GEN_VXFORM_DUAL(vsububm, PPC_ALTIVEC, PPC_NONE, \
1251 bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
1252 GEN_VXFORM_DUAL(vsububs, PPC_ALTIVEC, PPC_NONE, \
1253 bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
1254 GEN_VXFORM_DUAL(vsubuhm, PPC_ALTIVEC, PPC_NONE, \
1255 bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
1256 GEN_VXFORM_DUAL(vsubuhs, PPC_ALTIVEC, PPC_NONE, \
1257 bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
1258 GEN_VXFORM_DUAL(vaddshs, PPC_ALTIVEC, PPC_NONE, \
1259 bcdcpsgn, PPC_NONE, PPC2_ISA300)
1260 GEN_VXFORM_DUAL(vsubudm, PPC2_ALTIVEC_207, PPC_NONE, \
1261 bcds, PPC_NONE, PPC2_ISA300)
1262 GEN_VXFORM_DUAL(vsubuwm, PPC_ALTIVEC, PPC_NONE, \
1263 bcdus, PPC_NONE, PPC2_ISA300)
1264 GEN_VXFORM_DUAL(vsubsbs, PPC_ALTIVEC, PPC_NONE, \
1265 bcdtrunc, PPC_NONE, PPC2_ISA300)
1266 GEN_VXFORM_DUAL(vsubuqm, PPC2_ALTIVEC_207, PPC_NONE, \
1267 bcdtrunc, PPC_NONE, PPC2_ISA300)
1268 GEN_VXFORM_DUAL(vsubcuq, PPC2_ALTIVEC_207, PPC_NONE, \
1269 bcdutrunc, PPC_NONE, PPC2_ISA300)
1272 static void gen_vsbox(DisasContext *ctx)
1274 TCGv_ptr ra, rd;
1275 if (unlikely(!ctx->altivec_enabled)) {
1276 gen_exception(ctx, POWERPC_EXCP_VPU);
1277 return;
1279 ra = gen_avr_ptr(rA(ctx->opcode));
1280 rd = gen_avr_ptr(rD(ctx->opcode));
1281 gen_helper_vsbox(rd, ra);
1282 tcg_temp_free_ptr(ra);
1283 tcg_temp_free_ptr(rd);
1286 GEN_VXFORM(vcipher, 4, 20)
1287 GEN_VXFORM(vcipherlast, 4, 20)
1288 GEN_VXFORM(vncipher, 4, 21)
1289 GEN_VXFORM(vncipherlast, 4, 21)
1291 GEN_VXFORM_DUAL(vcipher, PPC_NONE, PPC2_ALTIVEC_207,
1292 vcipherlast, PPC_NONE, PPC2_ALTIVEC_207)
1293 GEN_VXFORM_DUAL(vncipher, PPC_NONE, PPC2_ALTIVEC_207,
1294 vncipherlast, PPC_NONE, PPC2_ALTIVEC_207)
1296 #define VSHASIGMA(op) \
1297 static void gen_##op(DisasContext *ctx) \
1299 TCGv_ptr ra, rd; \
1300 TCGv_i32 st_six; \
1301 if (unlikely(!ctx->altivec_enabled)) { \
1302 gen_exception(ctx, POWERPC_EXCP_VPU); \
1303 return; \
1305 ra = gen_avr_ptr(rA(ctx->opcode)); \
1306 rd = gen_avr_ptr(rD(ctx->opcode)); \
1307 st_six = tcg_const_i32(rB(ctx->opcode)); \
1308 gen_helper_##op(rd, ra, st_six); \
1309 tcg_temp_free_ptr(ra); \
1310 tcg_temp_free_ptr(rd); \
1311 tcg_temp_free_i32(st_six); \
1314 VSHASIGMA(vshasigmaw)
1315 VSHASIGMA(vshasigmad)
1317 GEN_VXFORM3(vpermxor, 22, 0xFF)
1318 GEN_VXFORM_DUAL(vsldoi, PPC_ALTIVEC, PPC_NONE,
1319 vpermxor, PPC_NONE, PPC2_ALTIVEC_207)
1321 #undef GEN_VR_LDX
1322 #undef GEN_VR_STX
1323 #undef GEN_VR_LVE
1324 #undef GEN_VR_STVE
1326 #undef GEN_VX_LOGICAL
1327 #undef GEN_VX_LOGICAL_207
1328 #undef GEN_VXFORM
1329 #undef GEN_VXFORM_207
1330 #undef GEN_VXFORM_DUAL
1331 #undef GEN_VXRFORM_DUAL
1332 #undef GEN_VXRFORM1
1333 #undef GEN_VXRFORM
1334 #undef GEN_VXFORM_DUPI
1335 #undef GEN_VXFORM_NOA
1336 #undef GEN_VXFORM_UIMM
1337 #undef GEN_VAFORM_PAIRED
1339 #undef GEN_BCD2