/*
 * translate/vmx-impl.c
 *
 * Altivec/VMX translation
 */

/*** Altivec vector extension ***/
/* Altivec registers moves */
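/*
 * The 32 AltiVec registers live in the second half of the unified
 * vsr[] array of CPUPPCState (entries 32..63), hence the "32 + reg"
 * in the offset computations below.
 */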
static inline TCGv_ptr gen_avr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, offsetof(CPUPPCState, vsr[32 + reg].u64[0]));
    return r;
}

static inline long avr64_offset(int reg, bool high)
{
    return offsetof(CPUPPCState, vsr[32 + reg].u64[(high ? 0 : 1)]);
}
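/*
 * lvx/stvx-style accesses ignore the low four bits of the EA (the
 * transfer is always 16-byte aligned) and move the vector as two
 * 64-bit halves, swapped in little-endian mode.
 */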
#define GEN_VR_LDX(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 avr; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    avr = tcg_temp_new_i64(); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    tcg_gen_andi_tl(EA, EA, ~0xf); \
    /* \
     * We only need to swap high and low halves; gen_qemu_ld64_i64 \
     * already does the necessary 64-bit byteswap. \
     */ \
    if (ctx->le_mode) { \
        gen_qemu_ld64_i64(ctx, avr, EA); \
        set_avr64(rD(ctx->opcode), avr, false); \
        tcg_gen_addi_tl(EA, EA, 8); \
        gen_qemu_ld64_i64(ctx, avr, EA); \
        set_avr64(rD(ctx->opcode), avr, true); \
    } else { \
        gen_qemu_ld64_i64(ctx, avr, EA); \
        set_avr64(rD(ctx->opcode), avr, true); \
        tcg_gen_addi_tl(EA, EA, 8); \
        gen_qemu_ld64_i64(ctx, avr, EA); \
        set_avr64(rD(ctx->opcode), avr, false); \
    } \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(avr); \
}

#define GEN_VR_STX(name, opc2, opc3) \
static void gen_st##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_i64 avr; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    avr = tcg_temp_new_i64(); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    tcg_gen_andi_tl(EA, EA, ~0xf); \
    /* \
     * We only need to swap high and low halves; gen_qemu_st64_i64 \
     * already does the necessary 64-bit byteswap. \
     */ \
    if (ctx->le_mode) { \
        get_avr64(avr, rD(ctx->opcode), false); \
        gen_qemu_st64_i64(ctx, avr, EA); \
        tcg_gen_addi_tl(EA, EA, 8); \
        get_avr64(avr, rD(ctx->opcode), true); \
        gen_qemu_st64_i64(ctx, avr, EA); \
    } else { \
        get_avr64(avr, rD(ctx->opcode), true); \
        gen_qemu_st64_i64(ctx, avr, EA); \
        tcg_gen_addi_tl(EA, EA, 8); \
        get_avr64(avr, rD(ctx->opcode), false); \
        gen_qemu_st64_i64(ctx, avr, EA); \
    } \
    tcg_temp_free(EA); \
    tcg_temp_free_i64(avr); \
}
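/*
 * lvebx/lvehx/lvewx and their store counterparts transfer a single
 * element: the EA is aligned down to the element size and the helper
 * takes care of positioning the element within the vector.
 */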
#define GEN_VR_LVE(name, opc2, opc3, size) \
static void gen_lve##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_ptr rs; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    if (size > 1) { \
        tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
    } \
    rs = gen_avr_ptr(rS(ctx->opcode)); \
    gen_helper_lve##name(cpu_env, rs, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_ptr(rs); \
}

#define GEN_VR_STVE(name, opc2, opc3, size) \
static void gen_stve##name(DisasContext *ctx) \
{ \
    TCGv EA; \
    TCGv_ptr rs; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    if (size > 1) { \
        tcg_gen_andi_tl(EA, EA, ~(size - 1)); \
    } \
    rs = gen_avr_ptr(rS(ctx->opcode)); \
    gen_helper_stve##name(cpu_env, rs, EA); \
    tcg_temp_free(EA); \
    tcg_temp_free_ptr(rs); \
}

GEN_VR_LDX(lvx, 0x07, 0x03);
/* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
GEN_VR_LDX(lvxl, 0x07, 0x0B);

GEN_VR_LVE(bx, 0x07, 0x00, 1);
GEN_VR_LVE(hx, 0x07, 0x01, 2);
GEN_VR_LVE(wx, 0x07, 0x02, 4);

GEN_VR_STX(svx, 0x07, 0x07);
/* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
GEN_VR_STX(svxl, 0x07, 0x0F);

GEN_VR_STVE(bx, 0x07, 0x04, 1);
GEN_VR_STVE(hx, 0x07, 0x05, 2);
GEN_VR_STVE(wx, 0x07, 0x06, 4);
static void gen_lvsl(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsl(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}

static void gen_lvsr(DisasContext *ctx)
{
    TCGv_ptr rd;
    TCGv EA;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_lvsr(rd, EA);
    tcg_temp_free(EA);
    tcg_temp_free_ptr(rd);
}
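/* VSCR is only 32 bits wide, so mfvscr zeroes the high doubleword of VRT */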
static void gen_mfvscr(DisasContext *ctx)
{
    TCGv_i32 t;
    TCGv_i64 avr;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    avr = tcg_temp_new_i64();
    tcg_gen_movi_i64(avr, 0);
    set_avr64(rD(ctx->opcode), avr, true);
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, vscr));
    tcg_gen_extu_i32_i64(avr, t);
    set_avr64(rD(ctx->opcode), avr, false);
    tcg_temp_free_i32(t);
    tcg_temp_free_i64(avr);
}

static void gen_mtvscr(DisasContext *ctx)
{
    TCGv_ptr p;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    p = gen_avr_ptr(rB(ctx->opcode));
    gen_helper_mtvscr(cpu_env, p);
    tcg_temp_free_ptr(p);
}
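/*
 * vmul10{u,eu,cu,ecu}q: multiply a 128-bit value by 10, processing the
 * two 64-bit halves with mulu2/add2 so that the carry propagates from
 * the low half into the high half.  add_cin adds the low decimal digit
 * of VRB as a carry-in; ret_carry returns the carry-out digit instead
 * of the product.
 */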
#define GEN_VX_VMUL10(name, add_cin, ret_carry) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 t2; \
    TCGv_i64 avr; \
    TCGv_i64 ten, z; \
 \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
 \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    t2 = tcg_temp_new_i64(); \
    avr = tcg_temp_new_i64(); \
    ten = tcg_const_i64(10); \
    z = tcg_const_i64(0); \
 \
    if (add_cin) { \
        get_avr64(avr, rA(ctx->opcode), false); \
        tcg_gen_mulu2_i64(t0, t1, avr, ten); \
        get_avr64(avr, rB(ctx->opcode), false); \
        tcg_gen_andi_i64(t2, avr, 0xF); \
        tcg_gen_add2_i64(avr, t2, t0, t1, t2, z); \
        set_avr64(rD(ctx->opcode), avr, false); \
    } else { \
        get_avr64(avr, rA(ctx->opcode), false); \
        tcg_gen_mulu2_i64(avr, t2, avr, ten); \
        set_avr64(rD(ctx->opcode), avr, false); \
    } \
 \
    if (ret_carry) { \
        get_avr64(avr, rA(ctx->opcode), true); \
        tcg_gen_mulu2_i64(t0, t1, avr, ten); \
        tcg_gen_add2_i64(t0, avr, t0, t1, t2, z); \
        set_avr64(rD(ctx->opcode), avr, false); \
        set_avr64(rD(ctx->opcode), z, true); \
    } else { \
        get_avr64(avr, rA(ctx->opcode), true); \
        tcg_gen_mul_i64(t0, avr, ten); \
        tcg_gen_add_i64(avr, t0, t2); \
        set_avr64(rD(ctx->opcode), avr, true); \
    } \
 \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(t2); \
    tcg_temp_free_i64(avr); \
    tcg_temp_free_i64(ten); \
    tcg_temp_free_i64(z); \
}

GEN_VX_VMUL10(vmul10uq, 0, 0);
GEN_VX_VMUL10(vmul10euq, 1, 0);
GEN_VX_VMUL10(vmul10cuq, 0, 1);
GEN_VX_VMUL10(vmul10ecuq, 1, 1);

/* Logical operations */
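/*
 * The 128-bit logical operations need no helper: they are implemented
 * as the same 64-bit TCG op applied to the high and low doublewords of
 * the two source registers.
 */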
#define GEN_VX_LOGICAL(name, tcg_op, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_i64 t0; \
    TCGv_i64 t1; \
    TCGv_i64 avr; \
 \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    t0 = tcg_temp_new_i64(); \
    t1 = tcg_temp_new_i64(); \
    avr = tcg_temp_new_i64(); \
 \
    get_avr64(t0, rA(ctx->opcode), true); \
    get_avr64(t1, rB(ctx->opcode), true); \
    tcg_op(avr, t0, t1); \
    set_avr64(rD(ctx->opcode), avr, true); \
 \
    get_avr64(t0, rA(ctx->opcode), false); \
    get_avr64(t1, rB(ctx->opcode), false); \
    tcg_op(avr, t0, t1); \
    set_avr64(rD(ctx->opcode), avr, false); \
 \
    tcg_temp_free_i64(t0); \
    tcg_temp_free_i64(t1); \
    tcg_temp_free_i64(avr); \
}

GEN_VX_LOGICAL(vand, tcg_gen_and_i64, 2, 16);
GEN_VX_LOGICAL(vandc, tcg_gen_andc_i64, 2, 17);
GEN_VX_LOGICAL(vor, tcg_gen_or_i64, 2, 18);
GEN_VX_LOGICAL(vxor, tcg_gen_xor_i64, 2, 19);
GEN_VX_LOGICAL(vnor, tcg_gen_nor_i64, 2, 20);
GEN_VX_LOGICAL(veqv, tcg_gen_eqv_i64, 2, 26);
GEN_VX_LOGICAL(vnand, tcg_gen_nand_i64, 2, 22);
GEN_VX_LOGICAL(vorc, tcg_gen_orc_i64, 2, 21);
#define GEN_VXFORM(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, ra, rb); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_ENV(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(cpu_env, rd, ra, rb); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM3(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rc, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rc = gen_avr_ptr(rC(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, ra, rb, rc); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rc); \
    tcg_temp_free_ptr(rd); \
}

/*
 * Support for Altivec instruction pairs that use bit 31 (Rc) as
 * an opcode bit. In general, these pairs come from different
 * versions of the ISA, so we must also support a pair of flags for
 * each instruction.
 */
#define GEN_VXFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
{ \
    if ((Rc(ctx->opcode) == 0) && \
        ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
        gen_##name0(ctx); \
    } else if ((Rc(ctx->opcode) == 1) && \
        ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
        gen_##name1(ctx); \
    } else { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
    } \
}
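/*
 * For example, vaddubm (Rc = 0) and vmul10cuq (Rc = 1) below share a
 * single opcode slot and are told apart only by Rc and by the
 * insns_flags/insns_flags2 of the CPU model being translated.
 */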
/* Like GEN_VXFORM_DUAL, but also checks an invalid-bits mask */
#define GEN_VXFORM_DUAL_EXT(name0, flg0, flg2_0, inval0, \
                            name1, flg1, flg2_1, inval1) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
{ \
    if ((Rc(ctx->opcode) == 0) && \
        ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0)) && \
        !(ctx->opcode & inval0)) { \
        gen_##name0(ctx); \
    } else if ((Rc(ctx->opcode) == 1) && \
        ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1)) && \
        !(ctx->opcode & inval1)) { \
        gen_##name1(ctx); \
    } else { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
    } \
}
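/*
 * Mixed-register ("HETRO") VX-form: rD and rA are GPRs while rB names
 * an AVR, as used by the vextu*lx/vextu*rx instructions below.
 */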
#define GEN_VXFORM_HETRO(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], rb); \
    tcg_temp_free_ptr(rb); \
}

GEN_VXFORM(vaddubm, 0, 0);
GEN_VXFORM_DUAL_EXT(vaddubm, PPC_ALTIVEC, PPC_NONE, 0, \
                    vmul10cuq, PPC_NONE, PPC2_ISA300, 0x0000F800)
GEN_VXFORM(vadduhm, 0, 1);
GEN_VXFORM_DUAL(vadduhm, PPC_ALTIVEC, PPC_NONE, \
                vmul10ecuq, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vadduwm, 0, 2);
GEN_VXFORM(vaddudm, 0, 3);
GEN_VXFORM(vsububm, 0, 16);
GEN_VXFORM(vsubuhm, 0, 17);
GEN_VXFORM(vsubuwm, 0, 18);
GEN_VXFORM(vsubudm, 0, 19);
GEN_VXFORM(vmaxub, 1, 0);
GEN_VXFORM(vmaxuh, 1, 1);
GEN_VXFORM(vmaxuw, 1, 2);
GEN_VXFORM(vmaxud, 1, 3);
GEN_VXFORM(vmaxsb, 1, 4);
GEN_VXFORM(vmaxsh, 1, 5);
GEN_VXFORM(vmaxsw, 1, 6);
GEN_VXFORM(vmaxsd, 1, 7);
GEN_VXFORM(vminub, 1, 8);
GEN_VXFORM(vminuh, 1, 9);
GEN_VXFORM(vminuw, 1, 10);
GEN_VXFORM(vminud, 1, 11);
GEN_VXFORM(vminsb, 1, 12);
GEN_VXFORM(vminsh, 1, 13);
GEN_VXFORM(vminsw, 1, 14);
GEN_VXFORM(vminsd, 1, 15);
GEN_VXFORM(vavgub, 1, 16);
GEN_VXFORM(vabsdub, 1, 16);
GEN_VXFORM_DUAL(vavgub, PPC_ALTIVEC, PPC_NONE, \
                vabsdub, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vavguh, 1, 17);
GEN_VXFORM(vabsduh, 1, 17);
GEN_VXFORM_DUAL(vavguh, PPC_ALTIVEC, PPC_NONE, \
                vabsduh, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vavguw, 1, 18);
GEN_VXFORM(vabsduw, 1, 18);
GEN_VXFORM_DUAL(vavguw, PPC_ALTIVEC, PPC_NONE, \
                vabsduw, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vavgsb, 1, 20);
GEN_VXFORM(vavgsh, 1, 21);
GEN_VXFORM(vavgsw, 1, 22);
GEN_VXFORM(vmrghb, 6, 0);
GEN_VXFORM(vmrghh, 6, 1);
GEN_VXFORM(vmrghw, 6, 2);
GEN_VXFORM(vmrglb, 6, 4);
GEN_VXFORM(vmrglh, 6, 5);
GEN_VXFORM(vmrglw, 6, 6);
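/*
 * vmrgew and vmrgow are open-coded rather than using a helper: the
 * even (resp. odd) words of each doubleword of VA and VB are combined
 * with shift and deposit ops.
 */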
static void gen_vmrgew(DisasContext *ctx)
{
    TCGv_i64 tmp;
    TCGv_i64 avr;
    int VT, VA, VB;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    VT = rD(ctx->opcode);
    VA = rA(ctx->opcode);
    VB = rB(ctx->opcode);
    tmp = tcg_temp_new_i64();
    avr = tcg_temp_new_i64();

    get_avr64(avr, VB, true);
    tcg_gen_shri_i64(tmp, avr, 32);
    get_avr64(avr, VA, true);
    tcg_gen_deposit_i64(avr, avr, tmp, 0, 32);
    set_avr64(VT, avr, true);

    get_avr64(avr, VB, false);
    tcg_gen_shri_i64(tmp, avr, 32);
    get_avr64(avr, VA, false);
    tcg_gen_deposit_i64(avr, avr, tmp, 0, 32);
    set_avr64(VT, avr, false);

    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(avr);
}

static void gen_vmrgow(DisasContext *ctx)
{
    TCGv_i64 t0, t1;
    TCGv_i64 avr;
    int VT, VA, VB;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    VT = rD(ctx->opcode);
    VA = rA(ctx->opcode);
    VB = rB(ctx->opcode);
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    avr = tcg_temp_new_i64();

    get_avr64(t0, VB, true);
    get_avr64(t1, VA, true);
    tcg_gen_deposit_i64(avr, t0, t1, 32, 32);
    set_avr64(VT, avr, true);

    get_avr64(t0, VB, false);
    get_avr64(t1, VA, false);
    tcg_gen_deposit_i64(avr, t0, t1, 32, 32);
    set_avr64(VT, avr, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(avr);
}
GEN_VXFORM(vmuloub, 4, 0);
GEN_VXFORM(vmulouh, 4, 1);
GEN_VXFORM(vmulouw, 4, 2);
GEN_VXFORM(vmuluwm, 4, 2);
GEN_VXFORM_DUAL(vmulouw, PPC_ALTIVEC, PPC_NONE,
                vmuluwm, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM(vmulosb, 4, 4);
GEN_VXFORM(vmulosh, 4, 5);
GEN_VXFORM(vmulosw, 4, 6);
GEN_VXFORM(vmuleub, 4, 8);
GEN_VXFORM(vmuleuh, 4, 9);
GEN_VXFORM(vmuleuw, 4, 10);
GEN_VXFORM(vmulesb, 4, 12);
GEN_VXFORM(vmulesh, 4, 13);
GEN_VXFORM(vmulesw, 4, 14);
GEN_VXFORM(vslb, 2, 4);
GEN_VXFORM(vslh, 2, 5);
GEN_VXFORM(vslw, 2, 6);
GEN_VXFORM(vrlwnm, 2, 6);
GEN_VXFORM_DUAL(vslw, PPC_ALTIVEC, PPC_NONE, \
                vrlwnm, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vsld, 2, 23);
GEN_VXFORM(vsrb, 2, 8);
GEN_VXFORM(vsrh, 2, 9);
GEN_VXFORM(vsrw, 2, 10);
GEN_VXFORM(vsrd, 2, 27);
GEN_VXFORM(vsrab, 2, 12);
GEN_VXFORM(vsrah, 2, 13);
GEN_VXFORM(vsraw, 2, 14);
GEN_VXFORM(vsrad, 2, 15);
GEN_VXFORM(vsrv, 2, 28);
GEN_VXFORM(vslv, 2, 29);
GEN_VXFORM(vslo, 6, 16);
GEN_VXFORM(vsro, 6, 17);
GEN_VXFORM(vaddcuw, 0, 6);
GEN_VXFORM(vsubcuw, 0, 22);
GEN_VXFORM_ENV(vaddubs, 0, 8);
GEN_VXFORM_DUAL_EXT(vaddubs, PPC_ALTIVEC, PPC_NONE, 0, \
                    vmul10uq, PPC_NONE, PPC2_ISA300, 0x0000F800)
GEN_VXFORM_ENV(vadduhs, 0, 9);
GEN_VXFORM_DUAL(vadduhs, PPC_ALTIVEC, PPC_NONE, \
                vmul10euq, PPC_NONE, PPC2_ISA300)
GEN_VXFORM_ENV(vadduws, 0, 10);
GEN_VXFORM_ENV(vaddsbs, 0, 12);
GEN_VXFORM_ENV(vaddshs, 0, 13);
GEN_VXFORM_ENV(vaddsws, 0, 14);
GEN_VXFORM_ENV(vsububs, 0, 24);
GEN_VXFORM_ENV(vsubuhs, 0, 25);
GEN_VXFORM_ENV(vsubuws, 0, 26);
GEN_VXFORM_ENV(vsubsbs, 0, 28);
GEN_VXFORM_ENV(vsubshs, 0, 29);
GEN_VXFORM_ENV(vsubsws, 0, 30);
GEN_VXFORM(vadduqm, 0, 4);
GEN_VXFORM(vaddcuq, 0, 5);
GEN_VXFORM3(vaddeuqm, 30, 0);
GEN_VXFORM3(vaddecuq, 30, 0);
GEN_VXFORM_DUAL(vaddeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
                vaddecuq, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM(vsubuqm, 0, 20);
GEN_VXFORM(vsubcuq, 0, 21);
GEN_VXFORM3(vsubeuqm, 31, 0);
GEN_VXFORM3(vsubecuq, 31, 0);
GEN_VXFORM_DUAL(vsubeuqm, PPC_NONE, PPC2_ALTIVEC_207, \
                vsubecuq, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM(vrlb, 2, 0);
GEN_VXFORM(vrlh, 2, 1);
GEN_VXFORM(vrlw, 2, 2);
GEN_VXFORM(vrlwmi, 2, 2);
GEN_VXFORM_DUAL(vrlw, PPC_ALTIVEC, PPC_NONE, \
                vrlwmi, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vrld, 2, 3);
GEN_VXFORM(vrldmi, 2, 3);
GEN_VXFORM_DUAL(vrld, PPC_NONE, PPC2_ALTIVEC_207, \
                vrldmi, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vsl, 2, 7);
GEN_VXFORM(vrldnm, 2, 7);
GEN_VXFORM_DUAL(vsl, PPC_ALTIVEC, PPC_NONE, \
                vrldnm, PPC_NONE, PPC2_ISA300)
GEN_VXFORM(vsr, 2, 11);
GEN_VXFORM_ENV(vpkuhum, 7, 0);
GEN_VXFORM_ENV(vpkuwum, 7, 1);
GEN_VXFORM_ENV(vpkudum, 7, 17);
GEN_VXFORM_ENV(vpkuhus, 7, 2);
GEN_VXFORM_ENV(vpkuwus, 7, 3);
GEN_VXFORM_ENV(vpkudus, 7, 19);
GEN_VXFORM_ENV(vpkshus, 7, 4);
GEN_VXFORM_ENV(vpkswus, 7, 5);
GEN_VXFORM_ENV(vpksdus, 7, 21);
GEN_VXFORM_ENV(vpkshss, 7, 6);
GEN_VXFORM_ENV(vpkswss, 7, 7);
GEN_VXFORM_ENV(vpksdss, 7, 23);
GEN_VXFORM(vpkpx, 7, 12);
GEN_VXFORM_ENV(vsum4ubs, 4, 24);
GEN_VXFORM_ENV(vsum4sbs, 4, 28);
GEN_VXFORM_ENV(vsum4shs, 4, 25);
GEN_VXFORM_ENV(vsum2sws, 4, 26);
GEN_VXFORM_ENV(vsumsws, 4, 30);
GEN_VXFORM_ENV(vaddfp, 5, 0);
GEN_VXFORM_ENV(vsubfp, 5, 1);
GEN_VXFORM_ENV(vmaxfp, 5, 16);
GEN_VXFORM_ENV(vminfp, 5, 17);
GEN_VXFORM_HETRO(vextublx, 6, 24)
GEN_VXFORM_HETRO(vextuhlx, 6, 25)
GEN_VXFORM_HETRO(vextuwlx, 6, 26)
GEN_VXFORM_DUAL(vmrgow, PPC_NONE, PPC2_ALTIVEC_207,
                vextuwlx, PPC_NONE, PPC2_ISA300)
GEN_VXFORM_HETRO(vextubrx, 6, 28)
GEN_VXFORM_HETRO(vextuhrx, 6, 29)
GEN_VXFORM_HETRO(vextuwrx, 6, 30)
GEN_VXFORM_DUAL(vmrgew, PPC_NONE, PPC2_ALTIVEC_207, \
                vextuwrx, PPC_NONE, PPC2_ISA300)
#define GEN_VXRFORM1(opname, name, str, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##opname(cpu_env, rd, ra, rb); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXRFORM(name, opc2, opc3) \
    GEN_VXRFORM1(name, name, #name, opc2, opc3) \
    GEN_VXRFORM1(name##_dot, name##_, #name ".", opc2, (opc3 | (0x1 << 4)))

/*
 * Support for Altivec instructions that use bit 31 (Rc) as an opcode
 * bit but also use bit 21 as an actual Rc bit. In general, these pairs
 * come from different versions of the ISA, so we must also support a
 * pair of flags for each instruction.
 */
#define GEN_VXRFORM_DUAL(name0, flg0, flg2_0, name1, flg1, flg2_1) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
{ \
    if ((Rc(ctx->opcode) == 0) && \
        ((ctx->insns_flags & flg0) || (ctx->insns_flags2 & flg2_0))) { \
        if (Rc21(ctx->opcode) == 0) { \
            gen_##name0(ctx); \
        } else { \
            gen_##name0##_(ctx); \
        } \
    } else if ((Rc(ctx->opcode) == 1) && \
        ((ctx->insns_flags & flg1) || (ctx->insns_flags2 & flg2_1))) { \
        if (Rc21(ctx->opcode) == 0) { \
            gen_##name1(ctx); \
        } else { \
            gen_##name1##_(ctx); \
        } \
    } else { \
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
    } \
}
GEN_VXRFORM(vcmpequb, 3, 0)
GEN_VXRFORM(vcmpequh, 3, 1)
GEN_VXRFORM(vcmpequw, 3, 2)
GEN_VXRFORM(vcmpequd, 3, 3)
GEN_VXRFORM(vcmpnezb, 3, 4)
GEN_VXRFORM(vcmpnezh, 3, 5)
GEN_VXRFORM(vcmpnezw, 3, 6)
GEN_VXRFORM(vcmpgtsb, 3, 12)
GEN_VXRFORM(vcmpgtsh, 3, 13)
GEN_VXRFORM(vcmpgtsw, 3, 14)
GEN_VXRFORM(vcmpgtsd, 3, 15)
GEN_VXRFORM(vcmpgtub, 3, 8)
GEN_VXRFORM(vcmpgtuh, 3, 9)
GEN_VXRFORM(vcmpgtuw, 3, 10)
GEN_VXRFORM(vcmpgtud, 3, 11)
GEN_VXRFORM(vcmpeqfp, 3, 3)
GEN_VXRFORM(vcmpgefp, 3, 7)
GEN_VXRFORM(vcmpgtfp, 3, 11)
GEN_VXRFORM(vcmpbfp, 3, 15)
GEN_VXRFORM(vcmpneb, 3, 0)
GEN_VXRFORM(vcmpneh, 3, 1)
GEN_VXRFORM(vcmpnew, 3, 2)

GEN_VXRFORM_DUAL(vcmpequb, PPC_ALTIVEC, PPC_NONE, \
                 vcmpneb, PPC_NONE, PPC2_ISA300)
GEN_VXRFORM_DUAL(vcmpequh, PPC_ALTIVEC, PPC_NONE, \
                 vcmpneh, PPC_NONE, PPC2_ISA300)
GEN_VXRFORM_DUAL(vcmpequw, PPC_ALTIVEC, PPC_NONE, \
                 vcmpnew, PPC_NONE, PPC2_ISA300)
GEN_VXRFORM_DUAL(vcmpeqfp, PPC_ALTIVEC, PPC_NONE, \
                 vcmpequd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXRFORM_DUAL(vcmpbfp, PPC_ALTIVEC, PPC_NONE, \
                 vcmpgtsd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXRFORM_DUAL(vcmpgtfp, PPC_ALTIVEC, PPC_NONE, \
                 vcmpgtud, PPC_NONE, PPC2_ALTIVEC_207)
#define GEN_VXFORM_SIMM(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rd; \
    TCGv_i32 simm; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    simm = tcg_const_i32(SIMM5(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, simm); \
    tcg_temp_free_i32(simm); \
    tcg_temp_free_ptr(rd); \
}

GEN_VXFORM_SIMM(vspltisb, 6, 12);
GEN_VXFORM_SIMM(vspltish, 6, 13);
GEN_VXFORM_SIMM(vspltisw, 6, 14);

#define GEN_VXFORM_NOA(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, rb); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_NOA_ENV(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
 \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(cpu_env, rd, rb); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_NOA_2(name, opc2, opc3, opc4) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, rb); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_NOA_3(name, opc2, opc3, opc4) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    gen_helper_##name(cpu_gpr[rD(ctx->opcode)], rb); \
    tcg_temp_free_ptr(rb); \
}
GEN_VXFORM_NOA(vupkhsb, 7, 8);
GEN_VXFORM_NOA(vupkhsh, 7, 9);
GEN_VXFORM_NOA(vupkhsw, 7, 25);
GEN_VXFORM_NOA(vupklsb, 7, 10);
GEN_VXFORM_NOA(vupklsh, 7, 11);
GEN_VXFORM_NOA(vupklsw, 7, 27);
GEN_VXFORM_NOA(vupkhpx, 7, 13);
GEN_VXFORM_NOA(vupklpx, 7, 15);
GEN_VXFORM_NOA_ENV(vrefp, 5, 4);
GEN_VXFORM_NOA_ENV(vrsqrtefp, 5, 5);
GEN_VXFORM_NOA_ENV(vexptefp, 5, 6);
GEN_VXFORM_NOA_ENV(vlogefp, 5, 7);
GEN_VXFORM_NOA_ENV(vrfim, 5, 11);
GEN_VXFORM_NOA_ENV(vrfin, 5, 8);
GEN_VXFORM_NOA_ENV(vrfip, 5, 10);
GEN_VXFORM_NOA_ENV(vrfiz, 5, 9);
GEN_VXFORM_NOA(vprtybw, 1, 24);
GEN_VXFORM_NOA(vprtybd, 1, 24);
GEN_VXFORM_NOA(vprtybq, 1, 24);

#define GEN_VXFORM_SIMM(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rd; \
    TCGv_i32 simm; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    simm = tcg_const_i32(SIMM5(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, simm); \
    tcg_temp_free_i32(simm); \
    tcg_temp_free_ptr(rd); \
}
#define GEN_VXFORM_UIMM(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
    TCGv_i32 uimm; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, rb, uimm); \
    tcg_temp_free_i32(uimm); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_UIMM_ENV(name, opc2, opc3) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
    TCGv_i32 uimm; \
 \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    uimm = tcg_const_i32(UIMM5(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(cpu_env, rd, rb, uimm); \
    tcg_temp_free_i32(uimm); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}

#define GEN_VXFORM_UIMM_SPLAT(name, opc2, opc3, splat_max) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv_ptr rb, rd; \
    uint8_t uimm = UIMM4(ctx->opcode); \
    TCGv_i32 t0; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    if (uimm > splat_max) { \
        uimm = 0; \
    } \
    t0 = tcg_temp_new_i32(); \
    tcg_gen_movi_i32(t0, uimm); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    gen_helper_##name(rd, rb, t0); \
    tcg_temp_free_i32(t0); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
}
GEN_VXFORM_UIMM(vspltb, 6, 8);
GEN_VXFORM_UIMM(vsplth, 6, 9);
GEN_VXFORM_UIMM(vspltw, 6, 10);
GEN_VXFORM_UIMM_SPLAT(vextractub, 6, 8, 15);
GEN_VXFORM_UIMM_SPLAT(vextractuh, 6, 9, 14);
GEN_VXFORM_UIMM_SPLAT(vextractuw, 6, 10, 12);
GEN_VXFORM_UIMM_SPLAT(vextractd, 6, 11, 8);
GEN_VXFORM_UIMM_SPLAT(vinsertb, 6, 12, 15);
GEN_VXFORM_UIMM_SPLAT(vinserth, 6, 13, 14);
GEN_VXFORM_UIMM_SPLAT(vinsertw, 6, 14, 12);
GEN_VXFORM_UIMM_SPLAT(vinsertd, 6, 15, 8);
GEN_VXFORM_UIMM_ENV(vcfux, 5, 12);
GEN_VXFORM_UIMM_ENV(vcfsx, 5, 13);
GEN_VXFORM_UIMM_ENV(vctuxs, 5, 14);
GEN_VXFORM_UIMM_ENV(vctsxs, 5, 15);
GEN_VXFORM_DUAL(vspltb, PPC_ALTIVEC, PPC_NONE,
                vextractub, PPC_NONE, PPC2_ISA300);
GEN_VXFORM_DUAL(vsplth, PPC_ALTIVEC, PPC_NONE,
                vextractuh, PPC_NONE, PPC2_ISA300);
GEN_VXFORM_DUAL(vspltw, PPC_ALTIVEC, PPC_NONE,
                vextractuw, PPC_NONE, PPC2_ISA300);
GEN_VXFORM_DUAL(vspltisb, PPC_ALTIVEC, PPC_NONE,
                vinsertb, PPC_NONE, PPC2_ISA300);
GEN_VXFORM_DUAL(vspltish, PPC_ALTIVEC, PPC_NONE,
                vinserth, PPC_NONE, PPC2_ISA300);
GEN_VXFORM_DUAL(vspltisw, PPC_ALTIVEC, PPC_NONE,
                vinsertw, PPC_NONE, PPC2_ISA300);
static void gen_vsldoi(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rd;
    TCGv_i32 sh;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    sh = tcg_const_i32(VSH(ctx->opcode));
    gen_helper_vsldoi(rd, ra, rb, sh);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rd);
    tcg_temp_free_i32(sh);
}

#define GEN_VAFORM_PAIRED(name0, name1, opc2) \
static void glue(gen_, name0##_##name1)(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rc, rd; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rc = gen_avr_ptr(rC(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    if (Rc(ctx->opcode)) { \
        gen_helper_##name1(cpu_env, rd, ra, rb, rc); \
    } else { \
        gen_helper_##name0(cpu_env, rd, ra, rb, rc); \
    } \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rc); \
    tcg_temp_free_ptr(rd); \
}
GEN_VAFORM_PAIRED(vmhaddshs, vmhraddshs, 16)

static void gen_vmladduhm(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rc, rd;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rc = gen_avr_ptr(rC(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_vmladduhm(rd, ra, rb, rc);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rc);
    tcg_temp_free_ptr(rd);
}

static void gen_vpermr(DisasContext *ctx)
{
    TCGv_ptr ra, rb, rc, rd;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rb = gen_avr_ptr(rB(ctx->opcode));
    rc = gen_avr_ptr(rC(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_vpermr(cpu_env, rd, ra, rb, rc);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);
    tcg_temp_free_ptr(rc);
    tcg_temp_free_ptr(rd);
}
GEN_VAFORM_PAIRED(vmsumubm, vmsummbm, 18)
GEN_VAFORM_PAIRED(vmsumuhm, vmsumuhs, 19)
GEN_VAFORM_PAIRED(vmsumshm, vmsumshs, 20)
GEN_VAFORM_PAIRED(vsel, vperm, 21)
GEN_VAFORM_PAIRED(vmaddfp, vnmsubfp, 23)

GEN_VXFORM_NOA(vclzb, 1, 28)
GEN_VXFORM_NOA(vclzh, 1, 29)
GEN_VXFORM_NOA(vclzw, 1, 30)
GEN_VXFORM_NOA(vclzd, 1, 31)
GEN_VXFORM_NOA_2(vnegw, 1, 24, 6)
GEN_VXFORM_NOA_2(vnegd, 1, 24, 7)
GEN_VXFORM_NOA_2(vextsb2w, 1, 24, 16)
GEN_VXFORM_NOA_2(vextsh2w, 1, 24, 17)
GEN_VXFORM_NOA_2(vextsb2d, 1, 24, 24)
GEN_VXFORM_NOA_2(vextsh2d, 1, 24, 25)
GEN_VXFORM_NOA_2(vextsw2d, 1, 24, 26)
GEN_VXFORM_NOA_2(vctzb, 1, 24, 28)
GEN_VXFORM_NOA_2(vctzh, 1, 24, 29)
GEN_VXFORM_NOA_2(vctzw, 1, 24, 30)
GEN_VXFORM_NOA_2(vctzd, 1, 24, 31)
GEN_VXFORM_NOA_3(vclzlsbb, 1, 24, 0)
GEN_VXFORM_NOA_3(vctzlsbb, 1, 24, 1)
GEN_VXFORM_NOA(vpopcntb, 1, 28)
GEN_VXFORM_NOA(vpopcnth, 1, 29)
GEN_VXFORM_NOA(vpopcntw, 1, 30)
GEN_VXFORM_NOA(vpopcntd, 1, 31)
GEN_VXFORM_DUAL(vclzb, PPC_NONE, PPC2_ALTIVEC_207, \
                vpopcntb, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vclzh, PPC_NONE, PPC2_ALTIVEC_207, \
                vpopcnth, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vclzw, PPC_NONE, PPC2_ALTIVEC_207, \
                vpopcntw, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vclzd, PPC_NONE, PPC2_ALTIVEC_207, \
                vpopcntd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM(vbpermd, 6, 23);
GEN_VXFORM(vbpermq, 6, 21);
GEN_VXFORM_NOA(vgbbd, 6, 20);
GEN_VXFORM(vpmsumb, 4, 16)
GEN_VXFORM(vpmsumh, 4, 17)
GEN_VXFORM(vpmsumw, 4, 18)
GEN_VXFORM(vpmsumd, 4, 19)
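/*
 * The BCD helpers return a condition code for CR6 through cpu_crf[6];
 * "ps" is the preferred-sign (PS) bit, taken from bit 0x200 of the
 * opcode.
 */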
#define GEN_BCD(op) \
static void gen_##op(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rb, rd; \
    TCGv_i32 ps; \
 \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
 \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
 \
    ps = tcg_const_i32((ctx->opcode & 0x200) != 0); \
 \
    gen_helper_##op(cpu_crf[6], rd, ra, rb, ps); \
 \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
    tcg_temp_free_i32(ps); \
}

#define GEN_BCD2(op) \
static void gen_##op(DisasContext *ctx) \
{ \
    TCGv_ptr rd, rb; \
    TCGv_i32 ps; \
 \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
 \
    rb = gen_avr_ptr(rB(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
 \
    ps = tcg_const_i32((ctx->opcode & 0x200) != 0); \
 \
    gen_helper_##op(cpu_crf[6], rd, rb, ps); \
 \
    tcg_temp_free_ptr(rb); \
    tcg_temp_free_ptr(rd); \
    tcg_temp_free_i32(ps); \
}

GEN_BCD(bcdadd)
GEN_BCD(bcdsub)
GEN_BCD2(bcdcfn)
GEN_BCD2(bcdctn)
GEN_BCD2(bcdcfz)
GEN_BCD2(bcdctz)
GEN_BCD2(bcdcfsq)
GEN_BCD2(bcdctsq)
GEN_BCD2(bcdsetsgn)
GEN_BCD(bcdcpsgn);
GEN_BCD(bcds);
GEN_BCD(bcdus);
GEN_BCD(bcdsr);
GEN_BCD(bcdtrunc);
GEN_BCD(bcdutrunc);
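/*
 * The xpnd04 decoders dispatch on opc4 to one of the BCD conversion
 * instructions above; they serve as the Rc = 1 halves of the vsubcuw
 * and vsubsws dual entries below.
 */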
static void gen_xpnd04_1(DisasContext *ctx)
{
    switch (opc4(ctx->opcode)) {
    case 0:
        gen_bcdctsq(ctx);
        break;
    case 2:
        gen_bcdcfsq(ctx);
        break;
    case 4:
        gen_bcdctz(ctx);
        break;
    case 5:
        gen_bcdctn(ctx);
        break;
    case 6:
        gen_bcdcfz(ctx);
        break;
    case 7:
        gen_bcdcfn(ctx);
        break;
    case 31:
        gen_bcdsetsgn(ctx);
        break;
    default:
        gen_invalid(ctx);
        break;
    }
}

static void gen_xpnd04_2(DisasContext *ctx)
{
    switch (opc4(ctx->opcode)) {
    case 0:
        gen_bcdctsq(ctx);
        break;
    case 2:
        gen_bcdcfsq(ctx);
        break;
    case 4:
        gen_bcdctz(ctx);
        break;
    case 6:
        gen_bcdcfz(ctx);
        break;
    case 7:
        gen_bcdcfn(ctx);
        break;
    case 31:
        gen_bcdsetsgn(ctx);
        break;
    default:
        gen_invalid(ctx);
        break;
    }
}
GEN_VXFORM_DUAL(vsubcuw, PPC_ALTIVEC, PPC_NONE, \
                xpnd04_1, PPC_NONE, PPC2_ISA300)
GEN_VXFORM_DUAL(vsubsws, PPC_ALTIVEC, PPC_NONE, \
                xpnd04_2, PPC_NONE, PPC2_ISA300)

GEN_VXFORM_DUAL(vsububm, PPC_ALTIVEC, PPC_NONE, \
                bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vsububs, PPC_ALTIVEC, PPC_NONE, \
                bcdadd, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vsubuhm, PPC_ALTIVEC, PPC_NONE, \
                bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vsubuhs, PPC_ALTIVEC, PPC_NONE, \
                bcdsub, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vaddshs, PPC_ALTIVEC, PPC_NONE, \
                bcdcpsgn, PPC_NONE, PPC2_ISA300)
GEN_VXFORM_DUAL(vsubudm, PPC2_ALTIVEC_207, PPC_NONE, \
                bcds, PPC_NONE, PPC2_ISA300)
GEN_VXFORM_DUAL(vsubuwm, PPC_ALTIVEC, PPC_NONE, \
                bcdus, PPC_NONE, PPC2_ISA300)
GEN_VXFORM_DUAL(vsubsbs, PPC_ALTIVEC, PPC_NONE, \
                bcdtrunc, PPC_NONE, PPC2_ISA300)
GEN_VXFORM_DUAL(vsubuqm, PPC2_ALTIVEC_207, PPC_NONE, \
                bcdtrunc, PPC_NONE, PPC2_ISA300)
GEN_VXFORM_DUAL(vsubcuq, PPC2_ALTIVEC_207, PPC_NONE, \
                bcdutrunc, PPC_NONE, PPC2_ISA300)
static void gen_vsbox(DisasContext *ctx)
{
    TCGv_ptr ra, rd;
    if (unlikely(!ctx->altivec_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VPU);
        return;
    }
    ra = gen_avr_ptr(rA(ctx->opcode));
    rd = gen_avr_ptr(rD(ctx->opcode));
    gen_helper_vsbox(rd, ra);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rd);
}

GEN_VXFORM(vcipher, 4, 20)
GEN_VXFORM(vcipherlast, 4, 20)
GEN_VXFORM(vncipher, 4, 21)
GEN_VXFORM(vncipherlast, 4, 21)

GEN_VXFORM_DUAL(vcipher, PPC_NONE, PPC2_ALTIVEC_207,
                vcipherlast, PPC_NONE, PPC2_ALTIVEC_207)
GEN_VXFORM_DUAL(vncipher, PPC_NONE, PPC2_ALTIVEC_207,
                vncipherlast, PPC_NONE, PPC2_ALTIVEC_207)
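/*
 * For vshasigmaw/vshasigmad the rB field is not a register number: it
 * encodes the st bit and the six mask, so it is passed to the helper
 * as an immediate.
 */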
#define VSHASIGMA(op) \
static void gen_##op(DisasContext *ctx) \
{ \
    TCGv_ptr ra, rd; \
    TCGv_i32 st_six; \
    if (unlikely(!ctx->altivec_enabled)) { \
        gen_exception(ctx, POWERPC_EXCP_VPU); \
        return; \
    } \
    ra = gen_avr_ptr(rA(ctx->opcode)); \
    rd = gen_avr_ptr(rD(ctx->opcode)); \
    st_six = tcg_const_i32(rB(ctx->opcode)); \
    gen_helper_##op(rd, ra, st_six); \
    tcg_temp_free_ptr(ra); \
    tcg_temp_free_ptr(rd); \
    tcg_temp_free_i32(st_six); \
}

VSHASIGMA(vshasigmaw)
VSHASIGMA(vshasigmad)

GEN_VXFORM3(vpermxor, 22, 0xFF)
GEN_VXFORM_DUAL(vsldoi, PPC_ALTIVEC, PPC_NONE,
                vpermxor, PPC_NONE, PPC2_ALTIVEC_207)
#undef GEN_VR_LDX
#undef GEN_VR_STX
#undef GEN_VR_LVE
#undef GEN_VR_STVE

#undef GEN_VX_LOGICAL
#undef GEN_VX_LOGICAL_207
#undef GEN_VXFORM
#undef GEN_VXFORM_207
#undef GEN_VXFORM_DUAL
#undef GEN_VXRFORM_DUAL
#undef GEN_VXRFORM1
#undef GEN_VXRFORM
#undef GEN_VXFORM_SIMM
#undef GEN_VXFORM_NOA
#undef GEN_VXFORM_UIMM
#undef GEN_VAFORM_PAIRED

#undef GEN_BCD2