target/ppc: Fix xxspltib
qemu/ar7.git: target/ppc/translate/vsx-impl.inc.c
/*** VSX extension ***/
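
/*
 * Each 128-bit VSR is kept in CPUPPCState as two 64-bit doublewords;
 * vsr64_offset(n, true) names the most-significant half. The helpers
 * below copy one half between a TCG temporary and the register file.
 */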

static inline void get_cpu_vsrh(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, true));
}

static inline void get_cpu_vsrl(TCGv_i64 dst, int n)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, false));
}

static inline void set_cpu_vsrh(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, true));
}

static inline void set_cpu_vsrl(int n, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, false));
}

#define VSX_LOAD_SCALAR(name, operation)  \
static void gen_##name(DisasContext *ctx)  \
{  \
    TCGv EA;  \
    TCGv_i64 t0;  \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    t0 = tcg_temp_new_i64();  \
    gen_set_access_type(ctx, ACCESS_INT);  \
    EA = tcg_temp_new();  \
    gen_addr_reg_index(ctx, EA);  \
    gen_qemu_##operation(ctx, t0, EA);  \
    set_cpu_vsrh(xT(ctx->opcode), t0);  \
    /* NOTE: cpu_vsrl is undefined */  \
    tcg_temp_free(EA);  \
    tcg_temp_free_i64(t0);  \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)

static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    TCGv_i64 t1;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_gen_mov_i64(t1, t0);
    set_cpu_vsrl(xT(ctx->opcode), t1);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    get_cpu_vsrh(xth, xT(ctx->opcode));
    get_cpu_vsrl(xtl, xT(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

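/*
 * Byte-swap helpers for the 16-byte vector forms: gen_bswap16x8 swaps
 * the bytes within each halfword and gen_bswap32x4 within each word,
 * across both 64-bit halves of a VSR.
 */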
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}

static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    get_cpu_vsrh(xth, xT(ctx->opcode));
    get_cpu_vsrl(xtl, xT(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    get_cpu_vsrh(xth, xT(ctx->opcode));
    get_cpu_vsrl(xtl, xT(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

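/*
 * lxv/stxv and the indexed lxvx/stxvx address all 64 VSX registers:
 * targets 0-31 require the VSX facility, while targets 32-63 overlay
 * the Altivec registers and only require VMX, hence the split
 * facility check in the macro below.
 */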
#define VSX_VECTOR_LOAD_STORE(name, op, indexed)  \
static void gen_##name(DisasContext *ctx)  \
{  \
    int xt;  \
    TCGv EA;  \
    TCGv_i64 xth;  \
    TCGv_i64 xtl;  \
    \
    if (indexed) {  \
        xt = xT(ctx->opcode);  \
    } else {  \
        xt = DQxT(ctx->opcode);  \
    }  \
    \
    if (xt < 32) {  \
        if (unlikely(!ctx->vsx_enabled)) {  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);  \
            return;  \
        }  \
    } else {  \
        if (unlikely(!ctx->altivec_enabled)) {  \
            gen_exception(ctx, POWERPC_EXCP_VPU);  \
            return;  \
        }  \
    }  \
    xth = tcg_temp_new_i64();  \
    xtl = tcg_temp_new_i64();  \
    get_cpu_vsrh(xth, xt);  \
    get_cpu_vsrl(xtl, xt);  \
    gen_set_access_type(ctx, ACCESS_INT);  \
    EA = tcg_temp_new();  \
    if (indexed) {  \
        gen_addr_reg_index(ctx, EA);  \
    } else {  \
        gen_addr_imm_index(ctx, EA, 0x0F);  \
    }  \
    if (ctx->le_mode) {  \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_LEQ);  \
        set_cpu_vsrl(xt, xtl);  \
        tcg_gen_addi_tl(EA, EA, 8);  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_LEQ);  \
        set_cpu_vsrh(xt, xth);  \
    } else {  \
        tcg_gen_qemu_##op(xth, EA, ctx->mem_idx, MO_BEQ);  \
        set_cpu_vsrh(xt, xth);  \
        tcg_gen_addi_tl(EA, EA, 8);  \
        tcg_gen_qemu_##op(xtl, EA, ctx->mem_idx, MO_BEQ);  \
        set_cpu_vsrl(xt, xtl);  \
    }  \
    tcg_temp_free(EA);  \
    tcg_temp_free_i64(xth);  \
    tcg_temp_free_i64(xtl);  \
}

VSX_VECTOR_LOAD_STORE(lxv, ld_i64, 0)
VSX_VECTOR_LOAD_STORE(stxv, st_i64, 0)
VSX_VECTOR_LOAD_STORE(lxvx, ld_i64, 1)
VSX_VECTOR_LOAD_STORE(stxvx, st_i64, 1)

#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)  \
static void gen_##name(DisasContext *ctx)  \
{  \
    TCGv EA, xt;  \
    \
    if (xT(ctx->opcode) < 32) {  \
        if (unlikely(!ctx->vsx_enabled)) {  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);  \
            return;  \
        }  \
    } else {  \
        if (unlikely(!ctx->altivec_enabled)) {  \
            gen_exception(ctx, POWERPC_EXCP_VPU);  \
            return;  \
        }  \
    }  \
    EA = tcg_temp_new();  \
    xt = tcg_const_tl(xT(ctx->opcode));  \
    gen_set_access_type(ctx, ACCESS_INT);  \
    gen_addr_register(ctx, EA);  \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);  \
    tcg_temp_free(xt);  \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif

#define VSX_LOAD_SCALAR_DS(name, operation)  \
static void gen_##name(DisasContext *ctx)  \
{  \
    TCGv EA;  \
    TCGv_i64 xth;  \
    \
    if (unlikely(!ctx->altivec_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VPU);  \
        return;  \
    }  \
    xth = tcg_temp_new_i64();  \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);  \
    gen_set_access_type(ctx, ACCESS_INT);  \
    EA = tcg_temp_new();  \
    gen_addr_imm_index(ctx, EA, 0x03);  \
    gen_qemu_##operation(ctx, xth, EA);  \
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);  \
    /* NOTE: cpu_vsrl is undefined */  \
    tcg_temp_free(EA);  \
    tcg_temp_free_i64(xth);  \
}

VSX_LOAD_SCALAR_DS(lxsd, ld64_i64)
VSX_LOAD_SCALAR_DS(lxssp, ld32fs)

#define VSX_STORE_SCALAR(name, operation)  \
static void gen_##name(DisasContext *ctx)  \
{  \
    TCGv EA;  \
    TCGv_i64 t0;  \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    t0 = tcg_temp_new_i64();  \
    gen_set_access_type(ctx, ACCESS_INT);  \
    EA = tcg_temp_new();  \
    gen_addr_reg_index(ctx, EA);  \
    get_cpu_vsrh(t0, xS(ctx->opcode));  \
    gen_qemu_##operation(ctx, t0, EA);  \
    tcg_temp_free(EA);  \
    tcg_temp_free_i64(t0);  \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsrh(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsrl(t0, xS(ctx->opcode));
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    get_cpu_vsrl(xsl, xS(ctx->opcode));
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

#define VSX_STORE_SCALAR_DS(name, operation)  \
static void gen_##name(DisasContext *ctx)  \
{  \
    TCGv EA;  \
    TCGv_i64 xth;  \
    \
    if (unlikely(!ctx->altivec_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VPU);  \
        return;  \
    }  \
    xth = tcg_temp_new_i64();  \
    get_cpu_vsrh(xth, rD(ctx->opcode) + 32);  \
    gen_set_access_type(ctx, ACCESS_INT);  \
    EA = tcg_temp_new();  \
    gen_addr_imm_index(ctx, EA, 0x03);  \
    gen_qemu_##operation(ctx, xth, EA);  \
    /* NOTE: cpu_vsrl is undefined */  \
    tcg_temp_free(EA);  \
    tcg_temp_free_i64(xth);  \
}

VSX_STORE_SCALAR_DS(stxsd, st64_i64)
VSX_STORE_SCALAR_DS(stxssp, st32fs)

static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsrh(xsh, xS(ctx->opcode));
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsrh(xT(ctx->opcode), xsh);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrl(t0, xS(ctx->opcode));
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsrh(xT(ctx->opcode), t0);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsrl(xT(ctx->opcode), t0);
    set_cpu_vsrh(xT(ctx->opcode), t0);
    tcg_temp_free_i64(t0);
}
#endif

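/*
 * xxpermdi: bit 1 of DM picks which doubleword of xA supplies the
 * high half of xT, bit 0 which doubleword of xB supplies the low
 * half. When xT overlaps a source, both inputs are fetched before
 * anything is written back.
 */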
static void gen_xxpermdi(DisasContext *ctx)
{
    TCGv_i64 xh, xl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    xh = tcg_temp_new_i64();
    xl = tcg_temp_new_i64();

    if (unlikely((xT(ctx->opcode) == xA(ctx->opcode)) ||
                 (xT(ctx->opcode) == xB(ctx->opcode)))) {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
        }

        set_cpu_vsrh(xT(ctx->opcode), xh);
        set_cpu_vsrl(xT(ctx->opcode), xl);
    } else {
        if ((DM(ctx->opcode) & 2) == 0) {
            get_cpu_vsrh(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        } else {
            get_cpu_vsrl(xh, xA(ctx->opcode));
            set_cpu_vsrh(xT(ctx->opcode), xh);
        }
        if ((DM(ctx->opcode) & 1) == 0) {
            get_cpu_vsrh(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        } else {
            get_cpu_vsrl(xl, xB(ctx->opcode));
            set_cpu_vsrl(xT(ctx->opcode), xl);
        }
    }
    tcg_temp_free_i64(xh);
    tcg_temp_free_i64(xl);
}

#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP 0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

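/*
 * Sign manipulation reduces to bit operations against the sign mask:
 * abs clears the sign bit(s) (andc), nabs sets them (or), neg flips
 * them (xor) and cpsgn copies them over from the other source.
 */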
#define VSX_SCALAR_MOVE(name, op, sgn_mask)  \
static void glue(gen_, name)(DisasContext *ctx)  \
{  \
    TCGv_i64 xb, sgm;  \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    xb = tcg_temp_new_i64();  \
    sgm = tcg_temp_new_i64();  \
    get_cpu_vsrh(xb, xB(ctx->opcode));  \
    tcg_gen_movi_i64(sgm, sgn_mask);  \
    switch (op) {  \
    case OP_ABS: {  \
        tcg_gen_andc_i64(xb, xb, sgm);  \
        break;  \
    }  \
    case OP_NABS: {  \
        tcg_gen_or_i64(xb, xb, sgm);  \
        break;  \
    }  \
    case OP_NEG: {  \
        tcg_gen_xor_i64(xb, xb, sgm);  \
        break;  \
    }  \
    case OP_CPSGN: {  \
        TCGv_i64 xa = tcg_temp_new_i64();  \
        get_cpu_vsrh(xa, xA(ctx->opcode));  \
        tcg_gen_and_i64(xa, xa, sgm);  \
        tcg_gen_andc_i64(xb, xb, sgm);  \
        tcg_gen_or_i64(xb, xb, xa);  \
        tcg_temp_free_i64(xa);  \
        break;  \
    }  \
    }  \
    set_cpu_vsrh(xT(ctx->opcode), xb);  \
    tcg_temp_free_i64(xb);  \
    tcg_temp_free_i64(sgm);  \
}

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)  \
static void glue(gen_, name)(DisasContext *ctx)  \
{  \
    int xa;  \
    int xt = rD(ctx->opcode) + 32;  \
    int xb = rB(ctx->opcode) + 32;  \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;  \
    \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    xbh = tcg_temp_new_i64();  \
    xbl = tcg_temp_new_i64();  \
    sgm = tcg_temp_new_i64();  \
    tmp = tcg_temp_new_i64();  \
    get_cpu_vsrh(xbh, xb);  \
    get_cpu_vsrl(xbl, xb);  \
    tcg_gen_movi_i64(sgm, sgn_mask);  \
    switch (op) {  \
    case OP_ABS:  \
        tcg_gen_andc_i64(xbh, xbh, sgm);  \
        break;  \
    case OP_NABS:  \
        tcg_gen_or_i64(xbh, xbh, sgm);  \
        break;  \
    case OP_NEG:  \
        tcg_gen_xor_i64(xbh, xbh, sgm);  \
        break;  \
    case OP_CPSGN:  \
        xah = tcg_temp_new_i64();  \
        xa = rA(ctx->opcode) + 32;  \
        get_cpu_vsrh(tmp, xa);  \
        tcg_gen_and_i64(xah, tmp, sgm);  \
        tcg_gen_andc_i64(xbh, xbh, sgm);  \
        tcg_gen_or_i64(xbh, xbh, xah);  \
        tcg_temp_free_i64(xah);  \
        break;  \
    }  \
    set_cpu_vsrh(xt, xbh);  \
    set_cpu_vsrl(xt, xbl);  \
    tcg_temp_free_i64(xbl);  \
    tcg_temp_free_i64(xbh);  \
    tcg_temp_free_i64(sgm);  \
    tcg_temp_free_i64(tmp);  \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)  \
static void glue(gen_, name)(DisasContext *ctx)  \
{  \
    TCGv_i64 xbh, xbl, sgm;  \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    xbh = tcg_temp_new_i64();  \
    xbl = tcg_temp_new_i64();  \
    sgm = tcg_temp_new_i64();  \
    get_cpu_vsrh(xbh, xB(ctx->opcode));  \
    get_cpu_vsrl(xbl, xB(ctx->opcode));  \
    tcg_gen_movi_i64(sgm, sgn_mask);  \
    switch (op) {  \
    case OP_ABS: {  \
        tcg_gen_andc_i64(xbh, xbh, sgm);  \
        tcg_gen_andc_i64(xbl, xbl, sgm);  \
        break;  \
    }  \
    case OP_NABS: {  \
        tcg_gen_or_i64(xbh, xbh, sgm);  \
        tcg_gen_or_i64(xbl, xbl, sgm);  \
        break;  \
    }  \
    case OP_NEG: {  \
        tcg_gen_xor_i64(xbh, xbh, sgm);  \
        tcg_gen_xor_i64(xbl, xbl, sgm);  \
        break;  \
    }  \
    case OP_CPSGN: {  \
        TCGv_i64 xah = tcg_temp_new_i64();  \
        TCGv_i64 xal = tcg_temp_new_i64();  \
        get_cpu_vsrh(xah, xA(ctx->opcode));  \
        get_cpu_vsrl(xal, xA(ctx->opcode));  \
        tcg_gen_and_i64(xah, xah, sgm);  \
        tcg_gen_and_i64(xal, xal, sgm);  \
        tcg_gen_andc_i64(xbh, xbh, sgm);  \
        tcg_gen_andc_i64(xbl, xbl, sgm);  \
        tcg_gen_or_i64(xbh, xbh, xah);  \
        tcg_gen_or_i64(xbl, xbl, xal);  \
        tcg_temp_free_i64(xah);  \
        tcg_temp_free_i64(xal);  \
        break;  \
    }  \
    }  \
    set_cpu_vsrh(xT(ctx->opcode), xbh);  \
    set_cpu_vsrl(xT(ctx->opcode), xbl);  \
    tcg_temp_free_i64(xbh);  \
    tcg_temp_free_i64(xbl);  \
    tcg_temp_free_i64(sgm);  \
}

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)

#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)  \
static void gen_##name(DisasContext *ctx)  \
{  \
    TCGv_i32 opc;  \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    opc = tcg_const_i32(ctx->opcode);  \
    gen_helper_##name(cpu_env, opc);  \
    tcg_temp_free_i32(opc);  \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type)  \
static void gen_##name(DisasContext *ctx)  \
{  \
    TCGv_i64 t0;  \
    TCGv_i64 t1;  \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    t0 = tcg_temp_new_i64();  \
    t1 = tcg_temp_new_i64();  \
    get_cpu_vsrh(t0, xB(ctx->opcode));  \
    gen_helper_##name(t1, cpu_env, t0);  \
    set_cpu_vsrh(xT(ctx->opcode), t1);  \
    tcg_temp_free_i64(t0);  \
    tcg_temp_free_i64(t1);  \
}

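/*
 * The arithmetic, compare and convert forms below go out of line:
 * GEN_VSX_HELPER_2 hands the raw opcode to a C helper that decodes
 * its own operands. The op1/op2/inval/type arguments are unused in
 * the expansion itself and presumably mirror the decode-table
 * entries kept elsewhere.
 */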
GEN_VSX_HELPER_2(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddadp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaddmdp, 0x04, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubadp, 0x04, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmsubmdp, 0x04, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddadp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmaddmdp, 0x04, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubadp, 0x04, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsnmsubmdp, 0x04, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpeqdp, 0x0C, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgtdp, 0x0C, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpgedp, 0x0C, 0x02, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpnedp, 0x0C, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsmaxcdp, 0x00, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmincdp, 0x00, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsmaxjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsminjdp, 0x00, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvqpdp, 0x04, 0x1A, 0x14, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xscvspdpn, 0x16, 0x14, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)

GEN_VSX_HELPER_2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_2(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddasp, 0x04, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmaddmsp, 0x04, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubasp, 0x04, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsmsubmsp, 0x04, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddasp, 0x04, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmaddmsp, 0x04, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubasp, 0x04, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xsnmsubmsp, 0x04, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_2(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_2(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddadp, 0x04, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmdp, 0x04, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubadp, 0x04, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmdp, 0x04, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddadp, 0x04, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmdp, 0x04, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubadp, 0x04, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmdp, 0x04, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_2(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddasp, 0x04, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaddmsp, 0x04, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubasp, 0x04, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmsubmsp, 0x04, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddasp, 0x04, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmaddmsp, 0x04, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubasp, 0x04, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvnmsubmsp, 0x04, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xxperm, 0x08, 0x03, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xxpermr, 0x08, 0x07, 0, PPC2_ISA300)

static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsrl(xT(ctx->opcode), xtl);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#define VSX_LOGICAL(name, vece, tcg_op)  \
static void glue(gen_, name)(DisasContext *ctx)  \
{  \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),  \
           vsr_full_offset(xA(ctx->opcode)),  \
           vsr_full_offset(xB(ctx->opcode)), 16, 16);  \
}

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)

#define VSX_XXMRG(name, high)  \
static void glue(gen_, name)(DisasContext *ctx)  \
{  \
    TCGv_i64 a0, a1, b0, b1, tmp;  \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    a0 = tcg_temp_new_i64();  \
    a1 = tcg_temp_new_i64();  \
    b0 = tcg_temp_new_i64();  \
    b1 = tcg_temp_new_i64();  \
    tmp = tcg_temp_new_i64();  \
    if (high) {  \
        get_cpu_vsrh(a0, xA(ctx->opcode));  \
        get_cpu_vsrh(a1, xA(ctx->opcode));  \
        get_cpu_vsrh(b0, xB(ctx->opcode));  \
        get_cpu_vsrh(b1, xB(ctx->opcode));  \
    } else {  \
        get_cpu_vsrl(a0, xA(ctx->opcode));  \
        get_cpu_vsrl(a1, xA(ctx->opcode));  \
        get_cpu_vsrl(b0, xB(ctx->opcode));  \
        get_cpu_vsrl(b1, xB(ctx->opcode));  \
    }  \
    tcg_gen_shri_i64(a0, a0, 32);  \
    tcg_gen_shri_i64(b0, b0, 32);  \
    tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);  \
    set_cpu_vsrh(xT(ctx->opcode), tmp);  \
    tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);  \
    set_cpu_vsrl(xT(ctx->opcode), tmp);  \
    tcg_temp_free_i64(a0);  \
    tcg_temp_free_i64(a1);  \
    tcg_temp_free_i64(b0);  \
    tcg_temp_free_i64(b1);  \
    tcg_temp_free_i64(tmp);  \
}

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

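/*
 * xxsel is expanded through the generic vector API: the GVecGen4
 * descriptor supplies both a 64-bit scalar fallback (.fni8) and a
 * host-vector variant (.fniv), and tcg_gen_gvec_4 picks whichever
 * the host backend supports.
 */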
static void xxsel_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c)
{
    tcg_gen_and_i64(b, b, c);
    tcg_gen_andc_i64(a, a, c);
    tcg_gen_or_i64(t, a, b);
}

static void xxsel_vec(unsigned vece, TCGv_vec t, TCGv_vec a,
                      TCGv_vec b, TCGv_vec c)
{
    tcg_gen_and_vec(vece, b, b, c);
    tcg_gen_andc_vec(vece, a, a, c);
    tcg_gen_or_vec(vece, t, a, b);
}

static void gen_xxsel(DisasContext *ctx)
{
    static const GVecGen4 g = {
        .fni8 = xxsel_i64,
        .fniv = xxsel_vec,
        .vece = MO_64,
    };
    int rt = xT(ctx->opcode);
    int ra = xA(ctx->opcode);
    int rb = xB(ctx->opcode);
    int rc = xC(ctx->opcode);

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    tcg_gen_gvec_4(vsr_full_offset(rt), vsr_full_offset(ra),
                   vsr_full_offset(rb), vsr_full_offset(rc), 16, 16, &g);
}

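/*
 * xxspltw splats word uim of xB. UIM counts words in big-endian
 * element order, so on a little-endian host the byte offset is
 * flipped within the 16-byte vector (bofs ^= 12) before it is
 * handed to gvec.
 */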
static void gen_xxspltw(DisasContext *ctx)
{
    int rt = xT(ctx->opcode);
    int rb = xB(ctx->opcode);
    int uim = UIM(ctx->opcode);
    int tofs, bofs;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    tofs = vsr_full_offset(rt);
    bofs = vsr_full_offset(rb);
    bofs += uim << MO_32;
#ifndef HOST_WORDS_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

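/*
 * xxspltib, the subject of this fix: the target may sit in either
 * half of the 64-entry file, so rt < 32 demands the VSX facility
 * and rt >= 32 the VMX one before the immediate byte is splatted.
 */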
static void gen_xxspltib(DisasContext *ctx)
{
    uint8_t uim8 = IMM8(ctx->opcode);
    int rt = xT(ctx->opcode);

    if (rt < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    tcg_gen_gvec_dup8i(vsr_full_offset(rt), 16, 16, uim8);
}

static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
    case 0: {
        get_cpu_vsrh(xth, xA(ctx->opcode));
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        break;
    }
    case 1: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrh(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrl(t0, xA(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrl(xtl, xA(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    case 2: {
        get_cpu_vsrl(xth, xA(ctx->opcode));
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        break;
    }
    case 3: {
        TCGv_i64 t0 = tcg_temp_new_i64();
        get_cpu_vsrl(xth, xA(ctx->opcode));
        tcg_gen_shli_i64(xth, xth, 32);
        get_cpu_vsrh(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xth, xth, t0);
        get_cpu_vsrh(xtl, xB(ctx->opcode));
        tcg_gen_shli_i64(xtl, xtl, 32);
        get_cpu_vsrl(t0, xB(ctx->opcode));
        tcg_gen_shri_i64(t0, t0, 32);
        tcg_gen_or_i64(xtl, xtl, t0);
        tcg_temp_free_i64(t0);
        break;
    }
    }

    set_cpu_vsrh(xT(ctx->opcode), xth);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#define VSX_EXTRACT_INSERT(name)  \
static void gen_##name(DisasContext *ctx)  \
{  \
    TCGv xt, xb;  \
    TCGv_i32 t0;  \
    TCGv_i64 t1;  \
    uint8_t uimm = UIMM4(ctx->opcode);  \
    \
    if (unlikely(!ctx->vsx_enabled)) {  \
        gen_exception(ctx, POWERPC_EXCP_VSXU);  \
        return;  \
    }  \
    xt = tcg_const_tl(xT(ctx->opcode));  \
    xb = tcg_const_tl(xB(ctx->opcode));  \
    t0 = tcg_temp_new_i32();  \
    t1 = tcg_temp_new_i64();  \
    /*  \
     * uimm > 15 out of bound and for  \
     * uimm > 12 handle as per hardware in helper  \
     */  \
    if (uimm > 15) {  \
        tcg_gen_movi_i64(t1, 0);  \
        set_cpu_vsrh(xT(ctx->opcode), t1);  \
        set_cpu_vsrl(xT(ctx->opcode), t1);  \
        /* free the temporaries before the early return too */  \
        tcg_temp_free(xb);  \
        tcg_temp_free(xt);  \
        tcg_temp_free_i32(t0);  \
        tcg_temp_free_i64(t1);  \
        return;  \
    }  \
    tcg_gen_movi_i32(t0, uimm);  \
    gen_helper_##name(cpu_env, xt, xb, t0);  \
    tcg_temp_free(xb);  \
    tcg_temp_free(xt);  \
    tcg_temp_free_i32(t0);  \
    tcg_temp_free_i64(t1);  \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsrh(t0, xB(ctx->opcode));
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    /* dword[1] is undefined */
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsrh(xah, rA(ctx->opcode) + 32);
    get_cpu_vsrl(xal, rA(ctx->opcode) + 32);
    xbh = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}

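/*
 * xsxsigdp extracts the 52-bit significand and supplies the implicit
 * leading bit (2^52) unless the exponent marks a zero/denormal (0)
 * or an inf/NaN (2047); the two movcond ops below zero it in those
 * cases. xsxsigqp does the same for the quad format.
 */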
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsrh(t1, xB(ctx->opcode));
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, rB(ctx->opcode) + 32);
    get_cpu_vsrl(xbl, rB(ctx->opcode) + 32);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsrh(rD(ctx->opcode) + 32, xth);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsrl(rD(ctx->opcode) + 32, xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xah, xA(ctx->opcode));
    get_cpu_vsrl(xal, xA(ctx->opcode));
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsrh(xT(ctx->opcode), xth);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsrh(xbh, xB(ctx->opcode));
    get_cpu_vsrl(xbl, xB(ctx->opcode));
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsrh(xT(ctx->opcode), xth);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsrl(xT(ctx->opcode), xtl);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL