target/ppc: declare xscvspdpn helper with call flags
[qemu.git] / target/ppc/translate/vsx-impl.c.inc

/***                           VSX extension                               ***/

static inline void get_cpu_vsr(TCGv_i64 dst, int n, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, vsr64_offset(n, high));
}

static inline void set_cpu_vsr(int n, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, vsr64_offset(n, high));
}

static inline TCGv_ptr gen_vsr_ptr(int reg)
{
    TCGv_ptr r = tcg_temp_new_ptr();
    tcg_gen_addi_ptr(r, cpu_env, vsr_full_offset(reg));
    return r;
}
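
/*
 * Scalar VSX loads write the fetched value into the upper doubleword of
 * VSR[XT]; the lower doubleword is left undefined, as noted below.
 */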
#define VSX_LOAD_SCALAR(name, operation)                      \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    gen_qemu_##operation(ctx, t0, EA);                        \
    set_cpu_vsr(xT(ctx->opcode), t0, true);                   \
    /* NOTE: cpu_vsrl is undefined */                         \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_LOAD_SCALAR(lxsdx, ld64_i64)
VSX_LOAD_SCALAR(lxsiwax, ld32s_i64)
VSX_LOAD_SCALAR(lxsibzx, ld8u_i64)
VSX_LOAD_SCALAR(lxsihzx, ld16u_i64)
VSX_LOAD_SCALAR(lxsiwzx, ld32u_i64)
VSX_LOAD_SCALAR(lxsspx, ld32fs)
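
/* lxvd2x: load the two doublewords of VSR[XT] from consecutive memory. */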
static void gen_lxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64_i64(ctx, t0, EA);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}
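
/*
 * lxvw4x: load four word elements.  In little-endian mode each 64-bit
 * half is loaded LE and its two words are then swapped, which yields
 * the element order the ISA requires.
 */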
static void gen_lxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xth, t1, t0, 32, 32);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(t0, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_shri_i64(t1, t0, 32);
        tcg_gen_deposit_i64(xtl, t1, t0, 32, 32);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
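
/* lxvwsx: load one word and splat it across all four word elements. */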
static void gen_lxvwsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i32 data;

    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UL));
    tcg_gen_gvec_dup_i32(MO_UL, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i32(data);
}
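
/* lxvdsx: load one doubleword and splat it across both doubleword elements. */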
static void gen_lxvdsx(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 data;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }

    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();

    gen_addr_reg_index(ctx, EA);

    data = tcg_temp_new_i64();
    tcg_gen_qemu_ld_i64(data, EA, ctx->mem_idx, DEF_MEMOP(MO_UQ));
    tcg_gen_gvec_dup_i64(MO_UQ, vsr_full_offset(xT(ctx->opcode)), 16, 16, data);

    tcg_temp_free(EA);
    tcg_temp_free_i64(data);
}
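
/*
 * Byte-swap each of the eight 16-bit elements held in the inh:inl
 * register pair, using the usual mask/shift/or idiom.
 */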
static void gen_bswap16x8(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 mask = tcg_const_i64(0x00FF00FF00FF00FF);
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* outh = ((inh & mask) << 8) | ((inh >> 8) & mask) */
    tcg_gen_and_i64(t0, inh, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inh, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outh, t0, t1);

    /* outl = ((inl & mask) << 8) | ((inl >> 8) & mask) */
    tcg_gen_and_i64(t0, inl, mask);
    tcg_gen_shli_i64(t0, t0, 8);
    tcg_gen_shri_i64(t1, inl, 8);
    tcg_gen_and_i64(t1, t1, mask);
    tcg_gen_or_i64(outl, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(mask);
}
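
/*
 * Byte-swap each of the four 32-bit elements in the inh:inl pair:
 * bswap the full 64 bits, then swap the two words back into place.
 */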
static void gen_bswap32x4(TCGv_i64 outh, TCGv_i64 outl,
                          TCGv_i64 inh, TCGv_i64 inl)
{
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 lo = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(hi, inh);
    tcg_gen_bswap64_i64(lo, inl);
    tcg_gen_shri_i64(outh, hi, 32);
    tcg_gen_deposit_i64(outh, outh, hi, 32, 32);
    tcg_gen_shri_i64(outl, lo, 32);
    tcg_gen_deposit_i64(outl, outl, lo, 32, 32);

    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(lo);
}

static void gen_lxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);

    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    if (ctx->le_mode) {
        gen_bswap16x8(xth, xtl, xth, xtl);
    }
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_lxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xth;
    TCGv_i64 xtl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_ld_i64(xth, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_ld_i64(xtl, EA, ctx->mem_idx, MO_BEUQ);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}
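
/*
 * lxvl/lxvll and stxvl/stxvll transfer a variable number of bytes, with
 * the length supplied in GPR[RB]; they are implemented out of line.
 */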
#ifdef TARGET_PPC64
#define VSX_VECTOR_LOAD_STORE_LENGTH(name)                         \
static void gen_##name(DisasContext *ctx)                          \
{                                                                  \
    TCGv EA;                                                       \
    TCGv_ptr xt;                                                   \
                                                                   \
    if (xT(ctx->opcode) < 32) {                                    \
        if (unlikely(!ctx->vsx_enabled)) {                         \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                 \
            return;                                                \
        }                                                          \
    } else {                                                       \
        if (unlikely(!ctx->altivec_enabled)) {                     \
            gen_exception(ctx, POWERPC_EXCP_VPU);                  \
            return;                                                \
        }                                                          \
    }                                                              \
    EA = tcg_temp_new();                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                             \
    gen_set_access_type(ctx, ACCESS_INT);                          \
    gen_addr_register(ctx, EA);                                    \
    gen_helper_##name(cpu_env, EA, xt, cpu_gpr[rB(ctx->opcode)]);  \
    tcg_temp_free(EA);                                             \
    tcg_temp_free_ptr(xt);                                         \
}

VSX_VECTOR_LOAD_STORE_LENGTH(lxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(lxvll)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvl)
VSX_VECTOR_LOAD_STORE_LENGTH(stxvll)
#endif
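
/*
 * Scalar VSX stores write the upper doubleword of VSR[XS] to memory
 * using the given store primitive.
 */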
#define VSX_STORE_SCALAR(name, operation)                     \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv EA;                                                  \
    TCGv_i64 t0;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    gen_set_access_type(ctx, ACCESS_INT);                     \
    EA = tcg_temp_new();                                      \
    gen_addr_reg_index(ctx, EA);                              \
    get_cpu_vsr(t0, xS(ctx->opcode), true);                   \
    gen_qemu_##operation(ctx, t0, EA);                        \
    tcg_temp_free(EA);                                        \
    tcg_temp_free_i64(t0);                                    \
}

VSX_STORE_SCALAR(stxsdx, st64_i64)

VSX_STORE_SCALAR(stxsibx, st8_i64)
VSX_STORE_SCALAR(stxsihx, st16_i64)
VSX_STORE_SCALAR(stxsiwx, st32_i64)
VSX_STORE_SCALAR(stxsspx, st32fs)

static void gen_stxvd2x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_gen_addi_tl(EA, EA, 8);
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    gen_qemu_st64_i64(ctx, t0, EA);
    tcg_temp_free(EA);
    tcg_temp_free_i64(t0);
}

static void gen_stxvw4x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_shri_i64(t0, xsh, 32);
        tcg_gen_deposit_i64(t1, t0, xsh, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_shri_i64(t0, xsl, 32);
        tcg_gen_deposit_i64(t1, t0, xsl, 32, 32);
        tcg_gen_qemu_st_i64(t1, EA, ctx->mem_idx, MO_LEUQ);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvh8x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    if (ctx->le_mode) {
        TCGv_i64 outh = tcg_temp_new_i64();
        TCGv_i64 outl = tcg_temp_new_i64();

        gen_bswap16x8(outh, outl, xsh, xsl);
        tcg_gen_qemu_st_i64(outh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(outl, EA, ctx->mem_idx, MO_BEUQ);
        tcg_temp_free_i64(outh);
        tcg_temp_free_i64(outl);
    } else {
        tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
        tcg_gen_addi_tl(EA, EA, 8);
        tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}

static void gen_stxvb16x(DisasContext *ctx)
{
    TCGv EA;
    TCGv_i64 xsh;
    TCGv_i64 xsl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xsh = tcg_temp_new_i64();
    xsl = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    get_cpu_vsr(xsl, xS(ctx->opcode), false);
    gen_set_access_type(ctx, ACCESS_INT);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    tcg_gen_qemu_st_i64(xsh, EA, ctx->mem_idx, MO_BEUQ);
    tcg_gen_addi_tl(EA, EA, 8);
    tcg_gen_qemu_st_i64(xsl, EA, ctx->mem_idx, MO_BEUQ);
    tcg_temp_free(EA);
    tcg_temp_free_i64(xsh);
    tcg_temp_free_i64(xsl);
}
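
/*
 * Moves between GPRs and VSRs.  The low 32 VSRs overlay the FPRs and
 * the high 32 overlay the Altivec registers, hence the split facility
 * checks below.
 */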
static void gen_mfvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    get_cpu_vsr(xsh, xS(ctx->opcode), true);
    tcg_gen_ext32u_i64(tmp, xsh);
    tcg_gen_trunc_i64_tl(cpu_gpr[rA(ctx->opcode)], tmp);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwa(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

static void gen_mtvsrwz(DisasContext *ctx)
{
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    TCGv_i64 tmp = tcg_temp_new_i64();
    TCGv_i64 xsh = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(tmp, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(xsh, tmp);
    set_cpu_vsr(xT(ctx->opcode), xsh, true);
    tcg_temp_free_i64(tmp);
    tcg_temp_free_i64(xsh);
}

#if defined(TARGET_PPC64)
static void gen_mfvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), true);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->fpu_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_FPU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

static void gen_mfvsrld(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xS(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xS(ctx->opcode), false);
    tcg_gen_mov_i64(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrdd(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    if (!rA(ctx->opcode)) {
        tcg_gen_movi_i64(t0, 0);
    } else {
        tcg_gen_mov_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    }
    set_cpu_vsr(xT(ctx->opcode), t0, true);

    tcg_gen_mov_i64(t0, cpu_gpr[rB(ctx->opcode)]);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    tcg_temp_free_i64(t0);
}

static void gen_mtvsrws(DisasContext *ctx)
{
    TCGv_i64 t0;
    if (xT(ctx->opcode) < 32) {
        if (unlikely(!ctx->vsx_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VSXU);
            return;
        }
    } else {
        if (unlikely(!ctx->altivec_enabled)) {
            gen_exception(ctx, POWERPC_EXCP_VPU);
            return;
        }
    }

    t0 = tcg_temp_new_i64();
    tcg_gen_deposit_i64(t0, cpu_gpr[rA(ctx->opcode)],
                        cpu_gpr[rA(ctx->opcode)], 32, 32);
    set_cpu_vsr(xT(ctx->opcode), t0, false);
    set_cpu_vsr(xT(ctx->opcode), t0, true);
    tcg_temp_free_i64(t0);
}

#endif
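
/*
 * Sign manipulation (abs, nabs, neg, copy-sign) is done with plain
 * logical operations on the sign bits, so no FP exceptions are raised
 * and FPSCR is untouched.
 */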
#define OP_ABS 1
#define OP_NABS 2
#define OP_NEG 3
#define OP_CPSGN 4
#define SGN_MASK_DP  0x8000000000000000ull
#define SGN_MASK_SP 0x8000000080000000ull

#define VSX_SCALAR_MOVE(name, op, sgn_mask)                       \
static void glue(gen_, name)(DisasContext *ctx)                   \
    {                                                             \
        TCGv_i64 xb, sgm;                                         \
        if (unlikely(!ctx->vsx_enabled)) {                        \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                \
            return;                                               \
        }                                                         \
        xb = tcg_temp_new_i64();                                  \
        sgm = tcg_temp_new_i64();                                 \
        get_cpu_vsr(xb, xB(ctx->opcode), true);                   \
        tcg_gen_movi_i64(sgm, sgn_mask);                          \
        switch (op) {                                             \
            case OP_ABS: {                                        \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                break;                                            \
            }                                                     \
            case OP_NABS: {                                       \
                tcg_gen_or_i64(xb, xb, sgm);                      \
                break;                                            \
            }                                                     \
            case OP_NEG: {                                        \
                tcg_gen_xor_i64(xb, xb, sgm);                     \
                break;                                            \
            }                                                     \
            case OP_CPSGN: {                                      \
                TCGv_i64 xa = tcg_temp_new_i64();                 \
                get_cpu_vsr(xa, xA(ctx->opcode), true);           \
                tcg_gen_and_i64(xa, xa, sgm);                     \
                tcg_gen_andc_i64(xb, xb, sgm);                    \
                tcg_gen_or_i64(xb, xb, xa);                       \
                tcg_temp_free_i64(xa);                            \
                break;                                            \
            }                                                     \
        }                                                         \
        set_cpu_vsr(xT(ctx->opcode), xb, true);                   \
        set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
        tcg_temp_free_i64(xb);                                    \
        tcg_temp_free_i64(sgm);                                   \
    }

VSX_SCALAR_MOVE(xsabsdp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnabsdp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE(xsnegdp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE(xscpsgndp, OP_CPSGN, SGN_MASK_DP)

#define VSX_SCALAR_MOVE_QP(name, op, sgn_mask)                    \
static void glue(gen_, name)(DisasContext *ctx)                   \
{                                                                 \
    int xa;                                                       \
    int xt = rD(ctx->opcode) + 32;                                \
    int xb = rB(ctx->opcode) + 32;                                \
    TCGv_i64 xah, xbh, xbl, sgm, tmp;                             \
                                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                            \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                    \
        return;                                                   \
    }                                                             \
    xbh = tcg_temp_new_i64();                                     \
    xbl = tcg_temp_new_i64();                                     \
    sgm = tcg_temp_new_i64();                                     \
    tmp = tcg_temp_new_i64();                                     \
    get_cpu_vsr(xbh, xb, true);                                   \
    get_cpu_vsr(xbl, xb, false);                                  \
    tcg_gen_movi_i64(sgm, sgn_mask);                              \
    switch (op) {                                                 \
    case OP_ABS:                                                  \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        break;                                                    \
    case OP_NABS:                                                 \
        tcg_gen_or_i64(xbh, xbh, sgm);                            \
        break;                                                    \
    case OP_NEG:                                                  \
        tcg_gen_xor_i64(xbh, xbh, sgm);                           \
        break;                                                    \
    case OP_CPSGN:                                                \
        xah = tcg_temp_new_i64();                                 \
        xa = rA(ctx->opcode) + 32;                                \
        get_cpu_vsr(tmp, xa, true);                               \
        tcg_gen_and_i64(xah, tmp, sgm);                           \
        tcg_gen_andc_i64(xbh, xbh, sgm);                          \
        tcg_gen_or_i64(xbh, xbh, xah);                            \
        tcg_temp_free_i64(xah);                                   \
        break;                                                    \
    }                                                             \
    set_cpu_vsr(xt, xbh, true);                                   \
    set_cpu_vsr(xt, xbl, false);                                  \
    tcg_temp_free_i64(xbl);                                       \
    tcg_temp_free_i64(xbh);                                       \
    tcg_temp_free_i64(sgm);                                       \
    tcg_temp_free_i64(tmp);                                       \
}

VSX_SCALAR_MOVE_QP(xsabsqp, OP_ABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnabsqp, OP_NABS, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xsnegqp, OP_NEG, SGN_MASK_DP)
VSX_SCALAR_MOVE_QP(xscpsgnqp, OP_CPSGN, SGN_MASK_DP)

#define VSX_VECTOR_MOVE(name, op, sgn_mask)                      \
static void glue(gen_, name)(DisasContext *ctx)                  \
    {                                                            \
        TCGv_i64 xbh, xbl, sgm;                                  \
        if (unlikely(!ctx->vsx_enabled)) {                       \
            gen_exception(ctx, POWERPC_EXCP_VSXU);               \
            return;                                              \
        }                                                        \
        xbh = tcg_temp_new_i64();                                \
        xbl = tcg_temp_new_i64();                                \
        sgm = tcg_temp_new_i64();                                \
        get_cpu_vsr(xbh, xB(ctx->opcode), true);                 \
        get_cpu_vsr(xbl, xB(ctx->opcode), false);                \
        tcg_gen_movi_i64(sgm, sgn_mask);                         \
        switch (op) {                                            \
            case OP_ABS: {                                       \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                break;                                           \
            }                                                    \
            case OP_NABS: {                                      \
                tcg_gen_or_i64(xbh, xbh, sgm);                   \
                tcg_gen_or_i64(xbl, xbl, sgm);                   \
                break;                                           \
            }                                                    \
            case OP_NEG: {                                       \
                tcg_gen_xor_i64(xbh, xbh, sgm);                  \
                tcg_gen_xor_i64(xbl, xbl, sgm);                  \
                break;                                           \
            }                                                    \
            case OP_CPSGN: {                                     \
                TCGv_i64 xah = tcg_temp_new_i64();               \
                TCGv_i64 xal = tcg_temp_new_i64();               \
                get_cpu_vsr(xah, xA(ctx->opcode), true);         \
                get_cpu_vsr(xal, xA(ctx->opcode), false);        \
                tcg_gen_and_i64(xah, xah, sgm);                  \
                tcg_gen_and_i64(xal, xal, sgm);                  \
                tcg_gen_andc_i64(xbh, xbh, sgm);                 \
                tcg_gen_andc_i64(xbl, xbl, sgm);                 \
                tcg_gen_or_i64(xbh, xbh, xah);                   \
                tcg_gen_or_i64(xbl, xbl, xal);                   \
                tcg_temp_free_i64(xah);                          \
                tcg_temp_free_i64(xal);                          \
                break;                                           \
            }                                                    \
        }                                                        \
        set_cpu_vsr(xT(ctx->opcode), xbh, true);                 \
        set_cpu_vsr(xT(ctx->opcode), xbl, false);                \
        tcg_temp_free_i64(xbh);                                  \
        tcg_temp_free_i64(xbl);                                  \
        tcg_temp_free_i64(sgm);                                  \
    }

VSX_VECTOR_MOVE(xvabsdp, OP_ABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnabsdp, OP_NABS, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvnegdp, OP_NEG, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvcpsgndp, OP_CPSGN, SGN_MASK_DP)
VSX_VECTOR_MOVE(xvabssp, OP_ABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnabssp, OP_NABS, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvnegsp, OP_NEG, SGN_MASK_SP)
VSX_VECTOR_MOVE(xvcpsgnsp, OP_CPSGN, SGN_MASK_SP)
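
/*
 * Vector compares set each result element to all ones or all zeroes.
 * When Rc=1 (bit 21 of the opcode) the helper also writes CR6;
 * otherwise its CR result is computed into a dummy and discarded.
 */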
#define VSX_CMP(name, op1, op2, inval, type)                                  \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 ignored;                                                         \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    if ((ctx->opcode >> (31 - 21)) & 1) {                                     \
        gen_helper_##name(cpu_crf[6], cpu_env, xt, xa, xb);                   \
    } else {                                                                  \
        ignored = tcg_temp_new_i32();                                         \
        gen_helper_##name(ignored, cpu_env, xt, xa, xb);                      \
        tcg_temp_free_i32(ignored);                                           \
    }                                                                         \
    gen_helper_float_check_status(cpu_env);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

VSX_CMP(xvcmpeqdp, 0x0C, 0x0C, 0, PPC2_VSX)
VSX_CMP(xvcmpgedp, 0x0C, 0x0E, 0, PPC2_VSX)
VSX_CMP(xvcmpgtdp, 0x0C, 0x0D, 0, PPC2_VSX)
VSX_CMP(xvcmpnedp, 0x0C, 0x0F, 0, PPC2_ISA300)
VSX_CMP(xvcmpeqsp, 0x0C, 0x08, 0, PPC2_VSX)
VSX_CMP(xvcmpgesp, 0x0C, 0x0A, 0, PPC2_VSX)
VSX_CMP(xvcmpgtsp, 0x0C, 0x09, 0, PPC2_VSX)
VSX_CMP(xvcmpnesp, 0x0C, 0x0B, 0, PPC2_VSX)

static bool trans_XSCVQPDP(DisasContext *ctx, arg_X_tb_rc *a)
{
    TCGv_i32 ro;
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    ro = tcg_const_i32(a->rc);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper_XSCVQPDP(cpu_env, ro, xt, xb);
    tcg_temp_free_i32(ro);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool do_helper_env_X_tb(DisasContext *ctx, arg_X_tb *a,
                               void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_avr_ptr(a->rt);
    xb = gen_avr_ptr(a->rb);
    gen_helper(cpu_env, xt, xb);
    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCVUQQP, do_helper_env_X_tb, gen_helper_XSCVUQQP)
TRANS(XSCVSQQP, do_helper_env_X_tb, gen_helper_XSCVSQQP)
TRANS(XSCVQPUQZ, do_helper_env_X_tb, gen_helper_XSCVQPUQZ)
TRANS(XSCVQPSQZ, do_helper_env_X_tb, gen_helper_XSCVQPSQZ)
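
/*
 * The GEN_VSX_HELPER_* boilerplate below emits a translator that defers
 * to an out-of-line helper.  X-form variants pass VSR numbers from the
 * XT/XA/XB fields; R-form variants add 32 to rD/rA/rB so they address
 * the Altivec half of the VSR file.
 */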
#define GEN_VSX_HELPER_2(name, op1, op2, inval, type)                         \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    gen_helper_##name(cpu_env, opc);                                          \
    tcg_temp_free_i32(opc);                                                   \
}

#define GEN_VSX_HELPER_X3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xa, xb);                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, xt, xb);                                       \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(xA(ctx->opcode));                                        \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_X1(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xb;                                                              \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xb = gen_vsr_ptr(xB(ctx->opcode));                                        \
    gen_helper_##name(cpu_env, opc, xb);                                      \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R3(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xa, xb;                                                      \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xa, xb);                              \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2(name, op1, op2, inval, type)                        \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xt, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xt = gen_vsr_ptr(rD(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xt, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_R2_AB(name, op1, op2, inval, type)                     \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 opc;                                                             \
    TCGv_ptr xa, xb;                                                          \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    opc = tcg_const_i32(ctx->opcode);                                         \
    xa = gen_vsr_ptr(rA(ctx->opcode) + 32);                                   \
    xb = gen_vsr_ptr(rB(ctx->opcode) + 32);                                   \
    gen_helper_##name(cpu_env, opc, xa, xb);                                  \
    tcg_temp_free_i32(opc);                                                   \
    tcg_temp_free_ptr(xa);                                                    \
    tcg_temp_free_ptr(xb);                                                    \
}

#define GEN_VSX_HELPER_XT_XB_ENV(name, op1, op2, inval, type) \
static void gen_##name(DisasContext *ctx)                     \
{                                                             \
    TCGv_i64 t0;                                              \
    TCGv_i64 t1;                                              \
    if (unlikely(!ctx->vsx_enabled)) {                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                \
        return;                                               \
    }                                                         \
    t0 = tcg_temp_new_i64();                                  \
    t1 = tcg_temp_new_i64();                                  \
    get_cpu_vsr(t0, xB(ctx->opcode), true);                   \
    gen_helper_##name(t1, cpu_env, t0);                       \
    set_cpu_vsr(xT(ctx->opcode), t1, true);                   \
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false); \
    tcg_temp_free_i64(t0);                                    \
    tcg_temp_free_i64(t1);                                    \
}

GEN_VSX_HELPER_X3(xsadddp, 0x00, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsaddqp, 0x04, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xssubdp, 0x00, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmuldp, 0x00, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsmulqp, 0x04, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsdivdp, 0x00, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_R3(xsdivqp, 0x04, 0x11, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2(xsredp, 0x14, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xssqrtdp, 0x16, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrsqrtedp, 0x14, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xstdivdp, 0x14, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xstsqrtdp, 0x14, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpexpdp, 0x0C, 0x07, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2_AB(xscmpexpqp, 0x04, 0x05, 0, PPC2_ISA300)
GEN_VSX_HELPER_X2_AB(xscmpodp, 0x0C, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xscmpudp, 0x0C, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpoqp, 0x04, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_R2_AB(xscmpuqp, 0x04, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmaxdp, 0x00, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xsmindp, 0x00, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdphp, 0x16, 0x15, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvdpsp, 0x12, 0x10, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvdpqp, 0x04, 0x1A, 0x16, PPC2_ISA300)
GEN_VSX_HELPER_XT_XB_ENV(xscvdpspn, 0x16, 0x10, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xscvqpsdz, 0x04, 0x1A, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpswz, 0x04, 0x1A, 0x09, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpudz, 0x04, 0x1A, 0x11, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvqpuwz, 0x04, 0x1A, 0x01, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvhpdp, 0x16, 0x15, 0x10, PPC2_ISA300)
GEN_VSX_HELPER_R2(xscvsdqp, 0x04, 0x1A, 0x0A, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvspdp, 0x12, 0x14, 0, PPC2_VSX)
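
/*
 * xscvspdpn is the non-signalling variant of xscvspdp: it raises no
 * exceptions and leaves FPSCR alone, so its helper needs no cpu_env
 * argument and can be declared with no-side-effect call flags.
 */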
bool trans_XSCVSPDPN(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_i64 tmp;

    REQUIRE_INSNS_FLAGS2(ctx, VSX207);
    REQUIRE_VSX(ctx);

    tmp = tcg_temp_new_i64();
    get_cpu_vsr(tmp, a->xb, true);

    gen_helper_XSCVSPDPN(tmp, tmp);

    set_cpu_vsr(a->xt, tmp, true);
    set_cpu_vsr(a->xt, tcg_constant_i64(0), false);

    tcg_temp_free_i64(tmp);

    return true;
}

GEN_VSX_HELPER_X2(xscvdpsxds, 0x10, 0x15, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpsxws, 0x10, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxds, 0x10, 0x14, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvdpuxws, 0x10, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xscvsxddp, 0x10, 0x17, 0, PPC2_VSX)
GEN_VSX_HELPER_R2(xscvudqp, 0x04, 0x1A, 0x02, PPC2_ISA300)
GEN_VSX_HELPER_X2(xscvuxddp, 0x10, 0x16, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpi, 0x12, 0x04, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpic, 0x16, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpim, 0x12, 0x07, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpip, 0x12, 0x06, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xsrdpiz, 0x12, 0x05, 0, PPC2_VSX)
GEN_VSX_HELPER_XT_XB_ENV(xsrsp, 0x12, 0x11, 0, PPC2_VSX207)
GEN_VSX_HELPER_R2(xsrqpi, 0x05, 0x00, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xsrqpxp, 0x05, 0x01, 0, PPC2_ISA300)
GEN_VSX_HELPER_R2(xssqrtqp, 0x04, 0x19, 0x1B, PPC2_ISA300)
GEN_VSX_HELPER_R3(xssubqp, 0x04, 0x10, 0, PPC2_ISA300)
GEN_VSX_HELPER_X3(xsaddsp, 0x00, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xssubsp, 0x00, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsmulsp, 0x00, 0x02, 0, PPC2_VSX207)
GEN_VSX_HELPER_X3(xsdivsp, 0x00, 0x03, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsresp, 0x14, 0x01, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xssqrtsp, 0x16, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xsrsqrtesp, 0x14, 0x00, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvsxdsp, 0x10, 0x13, 0, PPC2_VSX207)
GEN_VSX_HELPER_X2(xscvuxdsp, 0x10, 0x12, 0, PPC2_VSX207)
GEN_VSX_HELPER_X1(xststdcsp, 0x14, 0x12, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcdp, 0x14, 0x16, 0, PPC2_ISA300)
GEN_VSX_HELPER_2(xststdcqp, 0x04, 0x16, 0, PPC2_ISA300)

GEN_VSX_HELPER_X3(xvadddp, 0x00, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubdp, 0x00, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmuldp, 0x00, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivdp, 0x00, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvredp, 0x14, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtdp, 0x16, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtedp, 0x14, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivdp, 0x14, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtdp, 0x14, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxdp, 0x00, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmindp, 0x00, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsp, 0x12, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxds, 0x10, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpsxws, 0x10, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxds, 0x10, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvdpuxws, 0x10, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxddp, 0x10, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxddp, 0x10, 0x1E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwdp, 0x10, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwdp, 0x10, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpi, 0x12, 0x0C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpic, 0x16, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpim, 0x12, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpip, 0x12, 0x0E, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrdpiz, 0x12, 0x0D, 0, PPC2_VSX)

GEN_VSX_HELPER_X3(xvaddsp, 0x00, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvsubsp, 0x00, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmulsp, 0x00, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvdivsp, 0x00, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvresp, 0x14, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvsqrtsp, 0x16, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrsqrtesp, 0x14, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2_AB(xvtdivsp, 0x14, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X1(xvtsqrtsp, 0x14, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvmaxsp, 0x00, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X3(xvminsp, 0x00, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspdp, 0x12, 0x1C, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvhpsp, 0x16, 0x1D, 0x18, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvsphp, 0x16, 0x1D, 0x19, PPC2_ISA300)
GEN_VSX_HELPER_X2(xvcvspsxds, 0x10, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspsxws, 0x10, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxds, 0x10, 0x18, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvspuxws, 0x10, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxdsp, 0x10, 0x1B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxdsp, 0x10, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvsxwsp, 0x10, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvcvuxwsp, 0x10, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspi, 0x12, 0x08, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspic, 0x16, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspim, 0x12, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspip, 0x12, 0x0A, 0, PPC2_VSX)
GEN_VSX_HELPER_X2(xvrspiz, 0x12, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcsp, 0x14, 0x1A, 0, PPC2_VSX)
GEN_VSX_HELPER_2(xvtstdcdp, 0x14, 0x1E, 0, PPC2_VSX)

static bool trans_XXPERM(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERM(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

static bool trans_XXPERMR(DisasContext *ctx, arg_XX3 *a)
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_VPERMR(xt, xa, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}
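
/*
 * xxpermdi: the high bit of DM picks the doubleword taken from VSR[XA]
 * for the upper half of VSR[XT], the low bit picks the doubleword from
 * VSR[XB] for the lower half.  Both sources are read before XT is
 * written when the registers overlap.
 */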
static bool trans_XXPERMDI(DisasContext *ctx, arg_XX3_dm *a)
{
    TCGv_i64 t0, t1;

    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    t0 = tcg_temp_new_i64();

    if (unlikely(a->xt == a->xa || a->xt == a->xb)) {
        t1 = tcg_temp_new_i64();

        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        get_cpu_vsr(t1, a->xb, (a->dm & 1) == 0);

        set_cpu_vsr(a->xt, t0, true);
        set_cpu_vsr(a->xt, t1, false);

        tcg_temp_free_i64(t1);
    } else {
        get_cpu_vsr(t0, a->xa, (a->dm & 2) == 0);
        set_cpu_vsr(a->xt, t0, true);

        get_cpu_vsr(t0, a->xb, (a->dm & 1) == 0);
        set_cpu_vsr(a->xt, t0, false);
    }

    tcg_temp_free_i64(t0);

    return true;
}

static bool trans_XXPERMX(DisasContext *ctx, arg_8RR_XX4_uim3 *a)
{
    TCGv_ptr xt, xa, xb, xc;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);
    xc = gen_vsr_ptr(a->xc);

    gen_helper_XXPERMX(xt, xa, xb, xc, tcg_constant_tl(a->uim3));

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);
    tcg_temp_free_ptr(xc);

    return true;
}
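
/*
 * xxgenpcv*: the two low bits of IMM select among four helper variants
 * (big- vs little-endian, expanded vs compressed result); any other IMM
 * value is an invalid form.
 */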
typedef void (*xxgenpcv_genfn)(TCGv_ptr, TCGv_ptr);

static bool do_xxgenpcv(DisasContext *ctx, arg_X_imm5 *a,
                        const xxgenpcv_genfn fn[4])
{
    TCGv_ptr xt, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (a->imm & ~0x3) {
        gen_invalid(ctx);
        return true;
    }

    xt = gen_vsr_ptr(a->xt);
    vrb = gen_avr_ptr(a->vrb);

    fn[a->imm](xt, vrb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(vrb);

    return true;
}

#define XXGENPCV(NAME) \
    static bool trans_##NAME(DisasContext *ctx, arg_X_imm5 *a)  \
    {                                                           \
        static const xxgenpcv_genfn fn[4] = {                   \
            gen_helper_##NAME##_be_exp,                         \
            gen_helper_##NAME##_be_comp,                        \
            gen_helper_##NAME##_le_exp,                         \
            gen_helper_##NAME##_le_comp,                        \
        };                                                      \
        return do_xxgenpcv(ctx, a, fn);                         \
    }

XXGENPCV(XXGENPCVBM)
XXGENPCV(XXGENPCVHM)
XXGENPCV(XXGENPCVWM)
XXGENPCV(XXGENPCVDM)
#undef XXGENPCV

static bool do_xsmadd(DisasContext *ctx, int tgt, int src1, int src2, int src3,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr t, s1, s2, s3;

    t = gen_vsr_ptr(tgt);
    s1 = gen_vsr_ptr(src1);
    s2 = gen_vsr_ptr(src2);
    s3 = gen_vsr_ptr(src3);

    gen_helper(cpu_env, t, s1, s2, s3);

    tcg_temp_free_ptr(t);
    tcg_temp_free_ptr(s1);
    tcg_temp_free_ptr(s2);
    tcg_temp_free_ptr(s3);

    return true;
}
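
/*
 * Type-A forms reuse the target as a multiplicand (xT = xA * xT + xB),
 * while type-M forms reuse it as the addend (xT = xA * xB + xT). Both
 * share one helper and differ only in how xT and xB are routed to its
 * source operands.
 */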
static bool do_xsmadd_XX3(DisasContext *ctx, arg_XX3 *a, bool type_a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_VSX(ctx);

    if (type_a) {
        return do_xsmadd(ctx, a->xt, a->xa, a->xt, a->xb, gen_helper);
    }
    return do_xsmadd(ctx, a->xt, a->xa, a->xb, a->xt, gen_helper);
}

TRANS_FLAGS2(VSX, XSMADDADP, do_xsmadd_XX3, true, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMADDMDP, do_xsmadd_XX3, false, gen_helper_XSMADDDP)
TRANS_FLAGS2(VSX, XSMSUBADP, do_xsmadd_XX3, true, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSMSUBDP)
TRANS_FLAGS2(VSX, XSNMADDADP, do_xsmadd_XX3, true, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMADDMDP, do_xsmadd_XX3, false, gen_helper_XSNMADDDP)
TRANS_FLAGS2(VSX, XSNMSUBADP, do_xsmadd_XX3, true, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX, XSNMSUBMDP, do_xsmadd_XX3, false, gen_helper_XSNMSUBDP)
TRANS_FLAGS2(VSX207, XSMADDASP, do_xsmadd_XX3, true, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMADDMSP, do_xsmadd_XX3, false, gen_helper_XSMADDSP)
TRANS_FLAGS2(VSX207, XSMSUBASP, do_xsmadd_XX3, true, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSMSUBSP)
TRANS_FLAGS2(VSX207, XSNMADDASP, do_xsmadd_XX3, true, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMADDMSP, do_xsmadd_XX3, false, gen_helper_XSNMADDSP)
TRANS_FLAGS2(VSX207, XSNMSUBASP, do_xsmadd_XX3, true, gen_helper_XSNMSUBSP)
TRANS_FLAGS2(VSX207, XSNMSUBMSP, do_xsmadd_XX3, false, gen_helper_XSNMSUBSP)
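
/*
 * Quad-precision forms: VSRs 32-63 overlay the Altivec registers, hence
 * the +32 bias on the VR numbers. The low opcode bit, decoded here as rc,
 * selects the round-to-odd (*QPO) variant of the helper.
 */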
static bool do_xsmadd_X(DisasContext *ctx, arg_X_rc *a,
        void (*gen_helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr),
        void (*gen_helper_ro)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    int vrt, vra, vrb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    vrt = a->rt + 32;
    vra = a->ra + 32;
    vrb = a->rb + 32;

    if (a->rc) {
        return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper_ro);
    }

    return do_xsmadd(ctx, vrt, vra, vrt, vrb, gen_helper);
}

TRANS(XSMADDQP, do_xsmadd_X, gen_helper_XSMADDQP, gen_helper_XSMADDQPO)
TRANS(XSMSUBQP, do_xsmadd_X, gen_helper_XSMSUBQP, gen_helper_XSMSUBQPO)
TRANS(XSNMADDQP, do_xsmadd_X, gen_helper_XSNMADDQP, gen_helper_XSNMADDQPO)
TRANS(XSNMSUBQP, do_xsmadd_X, gen_helper_XSNMSUBQP, gen_helper_XSNMSUBQPO)
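
/*
 * Legacy vector multiply-add forms: a single gen function serves both
 * encodings, with opcode bit 25 selecting how xT and xB are routed to
 * the helper's source slots (A*T + B versus A*B + T, per the comments
 * in the branches below).
 */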
#define GEN_VSX_HELPER_VSX_MADD(name, op1, aop, mop, inval, type)             \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_ptr xt, s1, s2, s3;                                                  \
    if (unlikely(!ctx->vsx_enabled)) {                                        \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                                \
        return;                                                               \
    }                                                                         \
    xt = gen_vsr_ptr(xT(ctx->opcode));                                        \
    s1 = gen_vsr_ptr(xA(ctx->opcode));                                        \
    if (ctx->opcode & PPC_BIT32(25)) {                                        \
        /*                                                                    \
         * AxT + B                                                            \
         */                                                                   \
        s2 = gen_vsr_ptr(xB(ctx->opcode));                                    \
        s3 = gen_vsr_ptr(xT(ctx->opcode));                                    \
    } else {                                                                  \
        /*                                                                    \
         * AxB + T                                                            \
         */                                                                   \
        s2 = gen_vsr_ptr(xT(ctx->opcode));                                    \
        s3 = gen_vsr_ptr(xB(ctx->opcode));                                    \
    }                                                                         \
    gen_helper_##name(cpu_env, xt, s1, s2, s3);                               \
    tcg_temp_free_ptr(xt);                                                    \
    tcg_temp_free_ptr(s1);                                                    \
    tcg_temp_free_ptr(s2);                                                    \
    tcg_temp_free_ptr(s3);                                                    \
}

GEN_VSX_HELPER_VSX_MADD(xvmadddp, 0x04, 0x0C, 0x0D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubdp, 0x04, 0x0E, 0x0F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmadddp, 0x04, 0x1C, 0x1D, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubdp, 0x04, 0x1E, 0x1F, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmaddsp, 0x04, 0x08, 0x09, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvmsubsp, 0x04, 0x0A, 0x0B, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmaddsp, 0x04, 0x18, 0x19, 0, PPC2_VSX)
GEN_VSX_HELPER_VSX_MADD(xvnmsubsp, 0x04, 0x1A, 0x1B, 0, PPC2_VSX)
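
/*
 * XXBR[HWDQ]: byte-reverse each element of VSR[XB]. The doubleword
 * variant maps directly onto a pair of 64-bit bswaps.
 */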
static void gen_xxbrd(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_bswap64_i64(xth, xbh);
    tcg_gen_bswap64_i64(xtl, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrh(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap16x8(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
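
/*
 * Byte-reversing the full quadword amounts to byte-swapping each
 * doubleword and exchanging the two halves, which is what the t0
 * shuffle below implements.
 */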
static void gen_xxbrq(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_bswap64_i64(t0, xbl);
    tcg_gen_bswap64_i64(xtl, xbh);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);
    tcg_gen_mov_i64(xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xxbrw(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    gen_bswap32x4(xth, xtl, xbh, xbl);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

#define VSX_LOGICAL(name, vece, tcg_op)                              \
static void glue(gen_, name)(DisasContext *ctx)                      \
    {                                                                \
        if (unlikely(!ctx->vsx_enabled)) {                           \
            gen_exception(ctx, POWERPC_EXCP_VSXU);                   \
            return;                                                  \
        }                                                            \
        tcg_op(vece, vsr_full_offset(xT(ctx->opcode)),               \
               vsr_full_offset(xA(ctx->opcode)),                     \
               vsr_full_offset(xB(ctx->opcode)), 16, 16);            \
    }

VSX_LOGICAL(xxland, MO_64, tcg_gen_gvec_and)
VSX_LOGICAL(xxlandc, MO_64, tcg_gen_gvec_andc)
VSX_LOGICAL(xxlor, MO_64, tcg_gen_gvec_or)
VSX_LOGICAL(xxlxor, MO_64, tcg_gen_gvec_xor)
VSX_LOGICAL(xxlnor, MO_64, tcg_gen_gvec_nor)
VSX_LOGICAL(xxleqv, MO_64, tcg_gen_gvec_eqv)
VSX_LOGICAL(xxlnand, MO_64, tcg_gen_gvec_nand)
VSX_LOGICAL(xxlorc, MO_64, tcg_gen_gvec_orc)
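
/*
 * Merge Word: interleave the 32-bit words of one doubleword of xA and xB,
 * producing xT = { a.w_hi, b.w_hi, a.w_lo, b.w_lo } taken from the high
 * (xxmrghw) or low (xxmrglw) source doubleword.
 */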
#define VSX_XXMRG(name, high)                               \
static void glue(gen_, name)(DisasContext *ctx)             \
    {                                                       \
        TCGv_i64 a0, a1, b0, b1, tmp;                       \
        if (unlikely(!ctx->vsx_enabled)) {                  \
            gen_exception(ctx, POWERPC_EXCP_VSXU);          \
            return;                                         \
        }                                                   \
        a0 = tcg_temp_new_i64();                            \
        a1 = tcg_temp_new_i64();                            \
        b0 = tcg_temp_new_i64();                            \
        b1 = tcg_temp_new_i64();                            \
        tmp = tcg_temp_new_i64();                           \
        get_cpu_vsr(a0, xA(ctx->opcode), high);             \
        get_cpu_vsr(a1, xA(ctx->opcode), high);             \
        get_cpu_vsr(b0, xB(ctx->opcode), high);             \
        get_cpu_vsr(b1, xB(ctx->opcode), high);             \
        tcg_gen_shri_i64(a0, a0, 32);                       \
        tcg_gen_shri_i64(b0, b0, 32);                       \
        tcg_gen_deposit_i64(tmp, b0, a0, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, true);            \
        tcg_gen_deposit_i64(tmp, b1, a1, 32, 32);           \
        set_cpu_vsr(xT(ctx->opcode), tmp, false);           \
        tcg_temp_free_i64(a0);                              \
        tcg_temp_free_i64(a1);                              \
        tcg_temp_free_i64(b0);                              \
        tcg_temp_free_i64(b1);                              \
        tcg_temp_free_i64(tmp);                             \
    }

VSX_XXMRG(xxmrghw, 1)
VSX_XXMRG(xxmrglw, 0)

static bool trans_XXSEL(DisasContext *ctx, arg_XX4 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, VSX);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_bitsel(MO_64, vsr_full_offset(a->xt), vsr_full_offset(a->xc),
                        vsr_full_offset(a->xb), vsr_full_offset(a->xa), 16, 16);

    return true;
}
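
/*
 * UIM selects the source word in big-endian element order; on a
 * little-endian host the byte offset within the in-memory VSR has to be
 * flipped (xor with 12) before handing it to the gvec splat.
 */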
static bool trans_XXSPLTW(DisasContext *ctx, arg_XX2_uim2 *a)
{
    int tofs, bofs;

    REQUIRE_VSX(ctx);

    tofs = vsr_full_offset(a->xt);
    bofs = vsr_full_offset(a->xb);
    bofs += a->uim << MO_32;
#if !HOST_BIG_ENDIAN
    bofs ^= 8 | 4;
#endif

    tcg_gen_gvec_dup_mem(MO_32, tofs, bofs, 16, 16);
    return true;
}

#define pattern(x) (((x) & 0xff) * (~(uint64_t)0 / 0xff))

static bool trans_XXSPLTIB(DisasContext *ctx, arg_X_imm8 *a)
{
    if (a->xt < 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }
    tcg_gen_gvec_dup_imm(MO_8, vsr_full_offset(a->xt), 16, 16, a->imm);
    return true;
}

static bool trans_XXSPLTIW(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_32, vsr_full_offset(a->xt), 16, 16, a->si);

    return true;
}

static bool trans_XXSPLTIDP(DisasContext *ctx, arg_8RR_D *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_dup_imm(MO_64, vsr_full_offset(a->xt), 16, 16,
                         helper_todouble(a->si));
    return true;
}

static bool trans_XXSPLTI32DX(DisasContext *ctx, arg_8RR_D_IX *a)
{
    TCGv_i32 imm;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    imm = tcg_constant_i32(a->si);

    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(0 + a->ix)));
    tcg_gen_st_i32(imm, cpu_env,
        offsetof(CPUPPCState, vsr[a->xt].VsrW(2 + a->ix)));

    return true;
}

static bool trans_LXVKQ(DisasContext *ctx, arg_X_uim5 *a)
{
    static const uint64_t values[32] = {
        0, /* Unspecified */
        0x3FFF000000000000llu, /* QP +1.0 */
        0x4000000000000000llu, /* QP +2.0 */
        0x4000800000000000llu, /* QP +3.0 */
        0x4001000000000000llu, /* QP +4.0 */
        0x4001400000000000llu, /* QP +5.0 */
        0x4001800000000000llu, /* QP +6.0 */
        0x4001C00000000000llu, /* QP +7.0 */
        0x7FFF000000000000llu, /* QP +Inf */
        0x7FFF800000000000llu, /* QP dQNaN */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0, /* Unspecified */
        0x8000000000000000llu, /* QP -0.0 */
        0xBFFF000000000000llu, /* QP -1.0 */
        0xC000000000000000llu, /* QP -2.0 */
        0xC000800000000000llu, /* QP -3.0 */
        0xC001000000000000llu, /* QP -4.0 */
        0xC001400000000000llu, /* QP -5.0 */
        0xC001800000000000llu, /* QP -6.0 */
        0xC001C00000000000llu, /* QP -7.0 */
        0xFFFF000000000000llu, /* QP -Inf */
    };

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    if (values[a->uim]) {
        set_cpu_vsr(a->xt, tcg_constant_i64(0x0), false);
        set_cpu_vsr(a->xt, tcg_constant_i64(values[a->uim]), true);
    } else {
        gen_invalid(ctx);
    }

    return true;
}
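
/*
 * XVTLSBB tests the least-significant bit of every byte of VSR[XB]: the
 * LT bit of the target CR field is set when all 16 LSBs are 1 and the EQ
 * bit when all are 0 (the CR nibble being LT:GT:EQ:SO, hence the shifts
 * by 3 and 1 below).
 */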
static bool trans_XVTLSBB(DisasContext *ctx, arg_XX2_bf_xb *a)
{
    TCGv_i64 xb, t0, t1, all_true, all_false, mask, zero;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xb = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    all_true = tcg_temp_new_i64();
    all_false = tcg_temp_new_i64();
    mask = tcg_constant_i64(dup_const(MO_8, 1));
    zero = tcg_constant_i64(0);

    get_cpu_vsr(xb, a->xb, true);
    tcg_gen_and_i64(t0, mask, xb);
    get_cpu_vsr(xb, a->xb, false);
    tcg_gen_and_i64(t1, mask, xb);

    tcg_gen_or_i64(all_false, t0, t1);
    tcg_gen_and_i64(all_true, t0, t1);

    tcg_gen_setcond_i64(TCG_COND_EQ, all_false, all_false, zero);
    tcg_gen_shli_i64(all_false, all_false, 1);
    tcg_gen_setcond_i64(TCG_COND_EQ, all_true, all_true, mask);
    tcg_gen_shli_i64(all_true, all_true, 3);

    tcg_gen_or_i64(t0, all_false, all_true);
    tcg_gen_extrl_i64_i32(cpu_crf[a->bf], t0);

    tcg_temp_free_i64(xb);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(all_true);
    tcg_temp_free_i64(all_false);

    return true;
}
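
/*
 * XXSLDWI forms the 256-bit concatenation xA:xB and extracts the 128 bits
 * starting SHW words (SHW * 32 bits) from the left; each switch case below
 * open-codes one of the four possible word offsets.
 */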
static void gen_xxsldwi(DisasContext *ctx)
{
    TCGv_i64 xth, xtl;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();

    switch (SHW(ctx->opcode)) {
        case 0: {
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            break;
        }
        case 1: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), true);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xA(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
        case 2: {
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            break;
        }
        case 3: {
            TCGv_i64 t0 = tcg_temp_new_i64();
            get_cpu_vsr(xth, xA(ctx->opcode), false);
            tcg_gen_shli_i64(xth, xth, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), true);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xth, xth, t0);
            get_cpu_vsr(xtl, xB(ctx->opcode), true);
            tcg_gen_shli_i64(xtl, xtl, 32);
            get_cpu_vsr(t0, xB(ctx->opcode), false);
            tcg_gen_shri_i64(t0, t0, 32);
            tcg_gen_or_i64(xtl, xtl, t0);
            tcg_temp_free_i64(t0);
            break;
        }
    }

    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

#define VSX_EXTRACT_INSERT(name)                                \
static void gen_##name(DisasContext *ctx)                       \
{                                                               \
    TCGv_ptr xt, xb;                                            \
    TCGv_i32 t0;                                                \
    TCGv_i64 t1;                                                \
    uint8_t uimm = UIMM4(ctx->opcode);                          \
                                                                \
    if (unlikely(!ctx->vsx_enabled)) {                          \
        gen_exception(ctx, POWERPC_EXCP_VSXU);                  \
        return;                                                 \
    }                                                           \
    xt = gen_vsr_ptr(xT(ctx->opcode));                          \
    xb = gen_vsr_ptr(xB(ctx->opcode));                          \
    t0 = tcg_temp_new_i32();                                    \
    t1 = tcg_temp_new_i64();                                    \
    /*                                                          \
     * uimm > 15 is out of bounds; uimm > 12 is handled         \
     * as per hardware in the helper                            \
     */                                                         \
    if (uimm > 15) {                                            \
        tcg_gen_movi_i64(t1, 0);                                \
        set_cpu_vsr(xT(ctx->opcode), t1, true);                 \
        set_cpu_vsr(xT(ctx->opcode), t1, false);                \
        return;                                                 \
    }                                                           \
    tcg_gen_movi_i32(t0, uimm);                                 \
    gen_helper_##name(cpu_env, xt, xb, t0);                     \
    tcg_temp_free_ptr(xb);                                      \
    tcg_temp_free_ptr(xt);                                      \
    tcg_temp_free_i32(t0);                                      \
    tcg_temp_free_i64(t1);                                      \
}

VSX_EXTRACT_INSERT(xxextractuw)
VSX_EXTRACT_INSERT(xxinsertw)

#ifdef TARGET_PPC64
static void gen_xsxexpdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0;
    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    get_cpu_vsr(t0, xB(ctx->opcode), true);
    tcg_gen_extract_i64(rt, t0, 52, 11);
    tcg_temp_free_i64(t0);
}

static void gen_xsxexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);

    tcg_gen_extract_i64(xth, xbh, 48, 15);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_movi_i64(xtl, 0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
}

static void gen_xsiexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rb = cpu_gpr[rB(ctx->opcode)];
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    t0 = tcg_temp_new_i64();
    xth = tcg_temp_new_i64();
    tcg_gen_andi_i64(xth, ra, 0x800FFFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, rb, 0x7FF);
    tcg_gen_shli_i64(t0, t0, 52);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    set_cpu_vsr(xT(ctx->opcode), tcg_constant_i64(0), false);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
}

static void gen_xsiexpqp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    get_cpu_vsr(xah, rA(ctx->opcode) + 32, true);
    get_cpu_vsr(xal, rA(ctx->opcode) + 32, false);
    xbh = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x8000FFFFFFFFFFFF);
    tcg_gen_andi_i64(t0, xbh, 0x7FFF);
    tcg_gen_shli_i64(t0, t0, 48);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xal);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
}
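
/*
 * Extract the significand: the 52 fraction bits plus the implicit
 * integer bit, which is 1 unless the biased exponent is 0 (zero or
 * denormal) or 2047 (infinity or NaN).
 */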
static void gen_xsxsigdp(DisasContext *ctx)
{
    TCGv rt = cpu_gpr[rD(ctx->opcode)];
    TCGv_i64 t0, t1, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_extract_i64(exp, t1, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    get_cpu_vsr(t1, xB(ctx->opcode), true);
    tcg_gen_deposit_i64(rt, t0, t1, 0, 52);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
}

static void gen_xsxsigqp(DisasContext *ctx)
{
    TCGv_i64 t0, zr, nan, exp;
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, rB(ctx->opcode) + 32, true);
    get_cpu_vsr(xbl, rB(ctx->opcode) + 32, false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(32767);

    tcg_gen_extract_i64(exp, xbh, 48, 15);
    tcg_gen_movi_i64(t0, 0x0001000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 48);
    set_cpu_vsr(rD(ctx->opcode) + 32, xth, true);
    tcg_gen_mov_i64(xtl, xbl);
    set_cpu_vsr(rD(ctx->opcode) + 32, xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
#endif

static void gen_xviexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    t0 = tcg_temp_new_i64();

    tcg_gen_andi_i64(xth, xah, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbh, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xth, xth, t0);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_andi_i64(xtl, xal, 0x807FFFFF807FFFFF);
    tcg_gen_andi_i64(t0, xbl, 0xFF000000FF);
    tcg_gen_shli_i64(t0, t0, 23);
    tcg_gen_or_i64(xtl, xtl, t0);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xviexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xah;
    TCGv_i64 xal;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xah = tcg_temp_new_i64();
    xal = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xah, xA(ctx->opcode), true);
    get_cpu_vsr(xal, xA(ctx->opcode), false);
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_deposit_i64(xth, xah, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_deposit_i64(xtl, xal, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xah);
    tcg_temp_free_i64(xal);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpsp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_shri_i64(xth, xbh, 23);
    tcg_gen_andi_i64(xth, xth, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_shri_i64(xtl, xbl, 23);
    tcg_gen_andi_i64(xtl, xtl, 0xFF000000FF);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

static void gen_xvxexpdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);

    tcg_gen_extract_i64(xth, xbh, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xth, true);
    tcg_gen_extract_i64(xtl, xbl, 52, 11);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}

GEN_VSX_HELPER_X2(xvxsigsp, 0x00, 0x04, 0, PPC2_ISA300)

static void gen_xvxsigdp(DisasContext *ctx)
{
    TCGv_i64 xth;
    TCGv_i64 xtl;
    TCGv_i64 xbh;
    TCGv_i64 xbl;
    TCGv_i64 t0, zr, nan, exp;

    if (unlikely(!ctx->vsx_enabled)) {
        gen_exception(ctx, POWERPC_EXCP_VSXU);
        return;
    }
    xth = tcg_temp_new_i64();
    xtl = tcg_temp_new_i64();
    xbh = tcg_temp_new_i64();
    xbl = tcg_temp_new_i64();
    get_cpu_vsr(xbh, xB(ctx->opcode), true);
    get_cpu_vsr(xbl, xB(ctx->opcode), false);
    exp = tcg_temp_new_i64();
    t0 = tcg_temp_new_i64();
    zr = tcg_const_i64(0);
    nan = tcg_const_i64(2047);

    tcg_gen_extract_i64(exp, xbh, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xth, t0, xbh, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xth, true);

    tcg_gen_extract_i64(exp, xbl, 52, 11);
    tcg_gen_movi_i64(t0, 0x0010000000000000);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, zr, zr, t0);
    tcg_gen_movcond_i64(TCG_COND_EQ, t0, exp, nan, zr, t0);
    tcg_gen_deposit_i64(xtl, t0, xbl, 0, 52);
    set_cpu_vsr(xT(ctx->opcode), xtl, false);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(exp);
    tcg_temp_free_i64(zr);
    tcg_temp_free_i64(nan);
    tcg_temp_free_i64(xth);
    tcg_temp_free_i64(xtl);
    tcg_temp_free_i64(xbh);
    tcg_temp_free_i64(xbl);
}
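
/*
 * Common translation for LXV/STXV and the paired LXVP/STXVP forms. Each
 * 16-byte operand is moved as two 8-byte accesses; in little-endian mode
 * the high/low VSR halves are swapped, and for the paired forms the two
 * registers are accessed in reversed order as well.
 */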
static bool do_lstxv(DisasContext *ctx, int ra, TCGv displ,
                     int rt, bool store, bool paired)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;
    int rt1, rt2;

    xt = tcg_temp_new_i64();

    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (paired && ctx->le_mode) {
        rt1 = rt + 1;
        rt2 = rt;
    } else {
        rt1 = rt;
        rt2 = rt + 1;
    }

    if (store) {
        get_cpu_vsr(xt, rt1, !ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        gen_addr_add(ctx, ea, ea, 8);
        get_cpu_vsr(xt, rt1, ctx->le_mode);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, !ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
            gen_addr_add(ctx, ea, ea, 8);
            get_cpu_vsr(xt, rt2, ctx->le_mode);
            tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
        }
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, !ctx->le_mode);
        gen_addr_add(ctx, ea, ea, 8);
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt1, xt, ctx->le_mode);
        if (paired) {
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, !ctx->le_mode);
            gen_addr_add(ctx, ea, ea, 8);
            tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
            set_cpu_vsr(rt2, xt, ctx->le_mode);
        }
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

static bool do_lstxv_D(DisasContext *ctx, arg_D *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, tcg_constant_tl(a->si), a->rt, store, paired);
}

static bool do_lstxv_PLS_D(DisasContext *ctx, arg_PLS_D *a,
                           bool store, bool paired)
{
    arg_D d;
    REQUIRE_VSX(ctx);

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxv(ctx, d.ra, tcg_constant_tl(d.si), d.rt, store, paired);
}

static bool do_lstxv_X(DisasContext *ctx, arg_X *a, bool store, bool paired)
{
    if (paired || a->rt >= 32) {
        REQUIRE_VSX(ctx);
    } else {
        REQUIRE_VECTOR(ctx);
    }

    return do_lstxv(ctx, a->ra, cpu_gpr[a->rb], a->rt, store, paired);
}

static bool do_lstxsd(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
{
    TCGv ea;
    TCGv_i64 xt;
    MemOp mop;

    if (store) {
        REQUIRE_VECTOR(ctx);
    } else {
        REQUIRE_VSX(ctx);
    }

    xt = tcg_temp_new_i64();
    mop = DEF_MEMOP(MO_UQ);

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (store) {
        get_cpu_vsr(xt, rt + 32, true);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(rt + 32, xt, true);
        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);

    return true;
}

static bool do_lstxsd_DS(DisasContext *ctx, arg_D *a, bool store)
{
    return do_lstxsd(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
}

static bool do_plstxsd_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
{
    arg_D d;

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxsd(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
}

static bool do_lstxssp(DisasContext *ctx, int rt, int ra, TCGv displ, bool store)
{
    TCGv ea;
    TCGv_i64 xt;

    REQUIRE_VECTOR(ctx);

    xt = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, ra, displ);

    if (store) {
        get_cpu_vsr(xt, rt + 32, true);
        gen_qemu_st32fs(ctx, xt, ea);
    } else {
        gen_qemu_ld32fs(ctx, xt, ea);
        set_cpu_vsr(rt + 32, xt, true);
        set_cpu_vsr(rt + 32, tcg_constant_i64(0), false);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);

    return true;
}

static bool do_lstxssp_DS(DisasContext *ctx, arg_D *a, bool store)
{
    return do_lstxssp(ctx, a->rt, a->ra, tcg_constant_tl(a->si), store);
}

static bool do_plstxssp_PLS_D(DisasContext *ctx, arg_PLS_D *a, bool store)
{
    arg_D d;

    if (!resolve_PLS_D(ctx, &d, a)) {
        return true;
    }

    return do_lstxssp(ctx, d.rt, d.ra, tcg_constant_tl(d.si), store);
}

TRANS_FLAGS2(ISA300, LXSD, do_lstxsd_DS, false)
TRANS_FLAGS2(ISA300, STXSD, do_lstxsd_DS, true)
TRANS_FLAGS2(ISA300, LXSSP, do_lstxssp_DS, false)
TRANS_FLAGS2(ISA300, STXSSP, do_lstxssp_DS, true)
TRANS_FLAGS2(ISA300, STXV, do_lstxv_D, true, false)
TRANS_FLAGS2(ISA300, LXV, do_lstxv_D, false, false)
TRANS_FLAGS2(ISA310, STXVP, do_lstxv_D, true, true)
TRANS_FLAGS2(ISA310, LXVP, do_lstxv_D, false, true)
TRANS_FLAGS2(ISA300, STXVX, do_lstxv_X, true, false)
TRANS_FLAGS2(ISA300, LXVX, do_lstxv_X, false, false)
TRANS_FLAGS2(ISA310, STXVPX, do_lstxv_X, true, true)
TRANS_FLAGS2(ISA310, LXVPX, do_lstxv_X, false, true)
TRANS64_FLAGS2(ISA310, PLXSD, do_plstxsd_PLS_D, false)
TRANS64_FLAGS2(ISA310, PSTXSD, do_plstxsd_PLS_D, true)
TRANS64_FLAGS2(ISA310, PLXSSP, do_plstxssp_PLS_D, false)
TRANS64_FLAGS2(ISA310, PSTXSSP, do_plstxssp_PLS_D, true)
TRANS64_FLAGS2(ISA310, PSTXV, do_lstxv_PLS_D, true, false)
TRANS64_FLAGS2(ISA310, PLXV, do_lstxv_PLS_D, false, false)
TRANS64_FLAGS2(ISA310, PSTXVP, do_lstxv_PLS_D, true, true)
TRANS64_FLAGS2(ISA310, PLXVP, do_lstxv_PLS_D, false, true)
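
/*
 * "Rightmost element" loads and stores (lxvr*x/stxvr*x): the element is
 * placed in (or taken from) the low doubleword of the VSR, and the high
 * doubleword is zeroed on load.
 */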
static bool do_lstrm(DisasContext *ctx, arg_X *a, MemOp mop, bool store)
{
    TCGv ea;
    TCGv_i64 xt;

    REQUIRE_VSX(ctx);

    xt = tcg_temp_new_i64();

    gen_set_access_type(ctx, ACCESS_INT);
    ea = do_ea_calc(ctx, a->ra, cpu_gpr[a->rb]);

    if (store) {
        get_cpu_vsr(xt, a->rt, false);
        tcg_gen_qemu_st_i64(xt, ea, ctx->mem_idx, mop);
    } else {
        tcg_gen_qemu_ld_i64(xt, ea, ctx->mem_idx, mop);
        set_cpu_vsr(a->rt, xt, false);
        set_cpu_vsr(a->rt, tcg_constant_i64(0), true);
    }

    tcg_temp_free(ea);
    tcg_temp_free_i64(xt);
    return true;
}

TRANS_FLAGS2(ISA310, LXVRBX, do_lstrm, DEF_MEMOP(MO_UB), false)
TRANS_FLAGS2(ISA310, LXVRHX, do_lstrm, DEF_MEMOP(MO_UW), false)
TRANS_FLAGS2(ISA310, LXVRWX, do_lstrm, DEF_MEMOP(MO_UL), false)
TRANS_FLAGS2(ISA310, LXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), false)
TRANS_FLAGS2(ISA310, STXVRBX, do_lstrm, DEF_MEMOP(MO_UB), true)
TRANS_FLAGS2(ISA310, STXVRHX, do_lstrm, DEF_MEMOP(MO_UW), true)
TRANS_FLAGS2(ISA310, STXVRWX, do_lstrm, DEF_MEMOP(MO_UL), true)
TRANS_FLAGS2(ISA310, STXVRDX, do_lstrm, DEF_MEMOP(MO_UQ), true)
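
/*
 * XXEVAL's imm is an 8-bit truth table: each output bit of xT is
 * imm[(a << 2) | (b << 1) | c] for the corresponding input bits, with
 * PowerISA numbering the table MSB-first (hence the 7 - ctz64(imm)
 * inversion below). Each set bit of imm contributes one conjunction
 * (minterm), and the minterms are ORed together.
 */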
static void gen_xxeval_i64(TCGv_i64 t, TCGv_i64 a, TCGv_i64 b, TCGv_i64 c,
                           int64_t imm)
{
    /*
     * Instead of processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_i64 conj, disj;

    conj = tcg_temp_new_i64();
    disj = tcg_const_i64(0);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_i64(conj, a);
        } else {
            tcg_gen_not_i64(conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_i64(conj, conj, b);
        } else {
            tcg_gen_andc_i64(conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_i64(conj, conj, c);
        } else {
            tcg_gen_andc_i64(conj, conj, c);
        }
        tcg_gen_or_i64(disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_i64(t, disj);

    tcg_temp_free_i64(conj);
    tcg_temp_free_i64(disj);
}

static void gen_xxeval_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                           TCGv_vec c, int64_t imm)
{
    /*
     * Instead of processing imm bit-by-bit, we'll skip the computation of
     * conjunctions whose corresponding bit is unset.
     */
    int bit;
    TCGv_vec disj, conj;

    disj = tcg_const_zeros_vec_matching(t);
    conj = tcg_temp_new_vec_matching(t);

    /* Iterate over set bits from the least to the most significant bit */
    while (imm) {
        /*
         * Get the next bit to be processed with ctz64. Invert the result of
         * ctz64 to match the indexing used by PowerISA.
         */
        bit = 7 - ctz64(imm);
        if (bit & 0x4) {
            tcg_gen_mov_vec(conj, a);
        } else {
            tcg_gen_not_vec(vece, conj, a);
        }
        if (bit & 0x2) {
            tcg_gen_and_vec(vece, conj, conj, b);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, b);
        }
        if (bit & 0x1) {
            tcg_gen_and_vec(vece, conj, conj, c);
        } else {
            tcg_gen_andc_vec(vece, conj, conj, c);
        }
        tcg_gen_or_vec(vece, disj, disj, conj);

        /* Unset the least significant bit that is set */
        imm &= imm - 1;
    }

    tcg_gen_mov_vec(t, disj);

    tcg_temp_free_vec(disj);
    tcg_temp_free_vec(conj);
}

static bool trans_XXEVAL(DisasContext *ctx, arg_8RR_XX4_imm *a)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_andc_vec, 0
    };
    static const GVecGen4i op = {
        .fniv = gen_xxeval_vec,
        .fno = gen_helper_XXEVAL,
        .fni8 = gen_xxeval_i64,
        .opt_opc = vecop_list,
        .vece = MO_64
    };
    int xt = vsr_full_offset(a->xt), xa = vsr_full_offset(a->xa),
        xb = vsr_full_offset(a->xb), xc = vsr_full_offset(a->xc);

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    /* Equivalent functions that can be implemented with a single gen_gvec */
    switch (a->imm) {
    case 0b00000000: /* false */
        set_cpu_vsr(a->xt, tcg_constant_i64(0), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(0), false);
        break;
    case 0b00000011: /* and(B,A) */
        tcg_gen_gvec_and(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00000101: /* and(C,A) */
        tcg_gen_gvec_and(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b00001111: /* A */
        tcg_gen_gvec_mov(MO_64, xt, xa, 16, 16);
        break;
    case 0b00010001: /* and(C,B) */
        tcg_gen_gvec_and(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b00011011: /* C?B:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xb, xa, 16, 16);
        break;
    case 0b00011101: /* B?C:A */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xc, xa, 16, 16);
        break;
    case 0b00100111: /* C?A:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xc, xa, xb, 16, 16);
        break;
    case 0b00110011: /* B */
        tcg_gen_gvec_mov(MO_64, xt, xb, 16, 16);
        break;
    case 0b00110101: /* A?C:B */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xc, xb, 16, 16);
        break;
    case 0b00111100: /* xor(B,A) */
        tcg_gen_gvec_xor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b00111111: /* or(B,A) */
        tcg_gen_gvec_or(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b01000111: /* B?A:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xb, xa, xc, 16, 16);
        break;
    case 0b01010011: /* A?B:C */
        tcg_gen_gvec_bitsel(MO_64, xt, xa, xb, xc, 16, 16);
        break;
    case 0b01010101: /* C */
        tcg_gen_gvec_mov(MO_64, xt, xc, 16, 16);
        break;
    case 0b01011010: /* xor(C,A) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01011111: /* or(C,A) */
        tcg_gen_gvec_or(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b01100110: /* xor(C,B) */
        tcg_gen_gvec_xor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b01110111: /* or(C,B) */
        tcg_gen_gvec_or(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10001000: /* nor(C,B) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10011001: /* eqv(C,B) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b10100000: /* nor(C,A) */
        tcg_gen_gvec_nor(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10100101: /* eqv(C,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b10101010: /* not(C) */
        tcg_gen_gvec_not(MO_64, xt, xc, 16, 16);
        break;
    case 0b11000000: /* nor(B,A) */
        tcg_gen_gvec_nor(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11000011: /* eqv(B,A) */
        tcg_gen_gvec_eqv(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11001100: /* not(B) */
        tcg_gen_gvec_not(MO_64, xt, xb, 16, 16);
        break;
    case 0b11101110: /* nand(C,B) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xb, 16, 16);
        break;
    case 0b11110000: /* not(A) */
        tcg_gen_gvec_not(MO_64, xt, xa, 16, 16);
        break;
    case 0b11111010: /* nand(C,A) */
        tcg_gen_gvec_nand(MO_64, xt, xc, xa, 16, 16);
        break;
    case 0b11111100: /* nand(B,A) */
        tcg_gen_gvec_nand(MO_64, xt, xb, xa, 16, 16);
        break;
    case 0b11111111: /* true */
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), true);
        set_cpu_vsr(a->xt, tcg_constant_i64(-1), false);
        break;
    default:
        /* Fallback to compute all conjunctions/disjunctions */
        tcg_gen_gvec_4i(xt, xa, xb, xc, 16, 16, a->imm, &op);
    }

    return true;
}
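
/*
 * Build the select mask by smearing each element's sign bit across the
 * whole element (arithmetic shift right by width - 1), then use bitsel
 * to take b where the mask is set and a elsewhere.
 */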
static void gen_xxblendv_vec(unsigned vece, TCGv_vec t, TCGv_vec a, TCGv_vec b,
                             TCGv_vec c)
{
    TCGv_vec tmp = tcg_temp_new_vec_matching(c);
    tcg_gen_sari_vec(vece, tmp, c, (8 << vece) - 1);
    tcg_gen_bitsel_vec(vece, t, tmp, b, a);
    tcg_temp_free_vec(tmp);
}

static bool do_xxblendv(DisasContext *ctx, arg_8RR_XX4 *a, unsigned vece)
{
    static const TCGOpcode vecop_list[] = {
        INDEX_op_sari_vec, 0
    };
    static const GVecGen4 ops[4] = {
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVB,
            .opt_opc = vecop_list,
            .vece = MO_8
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVH,
            .opt_opc = vecop_list,
            .vece = MO_16
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVW,
            .opt_opc = vecop_list,
            .vece = MO_32
        },
        {
            .fniv = gen_xxblendv_vec,
            .fno = gen_helper_XXBLENDVD,
            .opt_opc = vecop_list,
            .vece = MO_64
        }
    };

    REQUIRE_VSX(ctx);

    tcg_gen_gvec_4(vsr_full_offset(a->xt), vsr_full_offset(a->xa),
                   vsr_full_offset(a->xb), vsr_full_offset(a->xc),
                   16, 16, &ops[vece]);

    return true;
}

TRANS(XXBLENDVB, do_xxblendv, MO_8)
TRANS(XXBLENDVH, do_xxblendv, MO_16)
TRANS(XXBLENDVW, do_xxblendv, MO_32)
TRANS(XXBLENDVD, do_xxblendv, MO_64)

static bool do_helper_XX3(DisasContext *ctx, arg_XX3 *a,
    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr xt, xa, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA300);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xa = gen_vsr_ptr(a->xa);
    xb = gen_vsr_ptr(a->xb);

    helper(cpu_env, xt, xa, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xa);
    tcg_temp_free_ptr(xb);

    return true;
}

TRANS(XSCMPEQDP, do_helper_XX3, gen_helper_XSCMPEQDP)
TRANS(XSCMPGEDP, do_helper_XX3, gen_helper_XSCMPGEDP)
TRANS(XSCMPGTDP, do_helper_XX3, gen_helper_XSCMPGTDP)
TRANS(XSMAXCDP, do_helper_XX3, gen_helper_XSMAXCDP)
TRANS(XSMINCDP, do_helper_XX3, gen_helper_XSMINCDP)
TRANS(XSMAXJDP, do_helper_XX3, gen_helper_XSMAXJDP)
TRANS(XSMINJDP, do_helper_XX3, gen_helper_XSMINJDP)

static bool do_helper_X(arg_X *a,
    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    TCGv_ptr rt, ra, rb;

    rt = gen_avr_ptr(a->rt);
    ra = gen_avr_ptr(a->ra);
    rb = gen_avr_ptr(a->rb);

    helper(cpu_env, rt, ra, rb);

    tcg_temp_free_ptr(rt);
    tcg_temp_free_ptr(ra);
    tcg_temp_free_ptr(rb);

    return true;
}

static bool do_xscmpqp(DisasContext *ctx, arg_X *a,
    void (*helper)(TCGv_ptr, TCGv_ptr, TCGv_ptr, TCGv_ptr))
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    return do_helper_X(a, helper);
}

TRANS(XSCMPEQQP, do_xscmpqp, gen_helper_XSCMPEQQP)
TRANS(XSCMPGEQP, do_xscmpqp, gen_helper_XSCMPGEQP)
TRANS(XSCMPGTQP, do_xscmpqp, gen_helper_XSCMPGTQP)
TRANS(XSMAXCQP, do_xscmpqp, gen_helper_XSMAXCQP)
TRANS(XSMINCQP, do_xscmpqp, gen_helper_XSMINCQP)

static bool trans_XVCVSPBF16(DisasContext *ctx, arg_XX2 *a)
{
    TCGv_ptr xt, xb;

    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    xt = gen_vsr_ptr(a->xt);
    xb = gen_vsr_ptr(a->xb);

    gen_helper_XVCVSPBF16(cpu_env, xt, xb);

    tcg_temp_free_ptr(xt);
    tcg_temp_free_ptr(xb);

    return true;
}
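
/*
 * bfloat16 is the high half of an IEEE single, so converting the bf16
 * value held in the low 16 bits of each word element is just a 16-bit
 * left shift; no helper call is needed.
 */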
static bool trans_XVCVBF16SPN(DisasContext *ctx, arg_XX2 *a)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    REQUIRE_VSX(ctx);

    tcg_gen_gvec_shli(MO_32, vsr_full_offset(a->xt), vsr_full_offset(a->xb),
                      16, 16, 16);

    return true;
}

#undef GEN_XX2FORM
#undef GEN_XX3FORM
#undef GEN_XX2IFORM
#undef GEN_XX3_RC_FORM
#undef GEN_XX3FORM_DM
#undef VSX_LOGICAL